/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Set a threshold for detection of missing buffers (in seconds)
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.

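// The tables below map framework-visible settings (Android camera metadata
// enums, QCamera3 vendor tag enums and property strings) to the corresponding
// mm-camera (cam_*) values used by the backend.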
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,              CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,             CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,     CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,      CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,         CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,  CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,         CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,            CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

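// JPEG thumbnail sizes advertised to the framework, flattened as
// (width, height) pairs; the leading (0, 0) entry indicates that thumbnail
// generation can be disabled, per the android.jpeg.availableThumbnailSizes
// contract.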
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * lookup traverses from lower to higher index, so for HAL values that map to several
 * Android values, the first match found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

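// camera3_device_ops vtable handed to the camera framework. These static
// entry points recover the QCamera3HardwareInterface instance from
// camera3_device_t::priv (set in the constructor below) and forward to it.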
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

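// Logs an Easel profiling event tagged with a CLOCK_BOOTTIME timestamp in
// milliseconds. This is a no-op unless gEaselProfilingEnabled is set.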
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

612 * FUNCTION : ~QCamera3HardwareInterface
613 *
614 * DESCRIPTION: destructor of QCamera3HardwareInterface
615 *
616 * PARAMETERS : none
617 *
618 * RETURN : none
619 *==========================================================================*/
620QCamera3HardwareInterface::~QCamera3HardwareInterface()
621{
622 LOGD("E");
623
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800624 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700625
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800626 // Disable power hint and enable the perf lock for close camera
627 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
628 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
629
630 // unlink of dualcam during close camera
631 if (mIsDeviceLinked) {
632 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
633 &m_pDualCamCmdPtr->bundle_info;
634 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
635 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
636 pthread_mutex_lock(&gCamLock);
637
638 if (mIsMainCamera == 1) {
639 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
640 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
641 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
642 // related session id should be session id of linked session
643 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
644 } else {
645 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
646 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
647 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
648 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
649 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800650 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800651 pthread_mutex_unlock(&gCamLock);
652
653 rc = mCameraHandle->ops->set_dual_cam_cmd(
654 mCameraHandle->camera_handle);
655 if (rc < 0) {
656 LOGE("Dualcam: Unlink failed, but still proceed to close");
657 }
658 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700659
660 /* We need to stop all streams before deleting any stream */
661 if (mRawDumpChannel) {
662 mRawDumpChannel->stop();
663 }
664
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700665 if (mHdrPlusRawSrcChannel) {
666 mHdrPlusRawSrcChannel->stop();
667 }
668
Thierry Strudel3d639192016-09-09 11:52:26 -0700669 // NOTE: 'camera3_stream_t *' objects are already freed at
670 // this stage by the framework
671 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
672 it != mStreamInfo.end(); it++) {
673 QCamera3ProcessingChannel *channel = (*it)->channel;
674 if (channel) {
675 channel->stop();
676 }
677 }
678 if (mSupportChannel)
679 mSupportChannel->stop();
680
681 if (mAnalysisChannel) {
682 mAnalysisChannel->stop();
683 }
684 if (mMetadataChannel) {
685 mMetadataChannel->stop();
686 }
687 if (mChannelHandle) {
688 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -0700689 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -0700690 LOGD("stopping channel %d", mChannelHandle);
691 }
692
693 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
694 it != mStreamInfo.end(); it++) {
695 QCamera3ProcessingChannel *channel = (*it)->channel;
696 if (channel)
697 delete channel;
698 free (*it);
699 }
700 if (mSupportChannel) {
701 delete mSupportChannel;
702 mSupportChannel = NULL;
703 }
704
705 if (mAnalysisChannel) {
706 delete mAnalysisChannel;
707 mAnalysisChannel = NULL;
708 }
709 if (mRawDumpChannel) {
710 delete mRawDumpChannel;
711 mRawDumpChannel = NULL;
712 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700713 if (mHdrPlusRawSrcChannel) {
714 delete mHdrPlusRawSrcChannel;
715 mHdrPlusRawSrcChannel = NULL;
716 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700717 if (mDummyBatchChannel) {
718 delete mDummyBatchChannel;
719 mDummyBatchChannel = NULL;
720 }
721
722 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800723 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700724
725 if (mMetadataChannel) {
726 delete mMetadataChannel;
727 mMetadataChannel = NULL;
728 }
729
730 /* Clean up all channels */
731 if (mCameraInitialized) {
732 if(!mFirstConfiguration){
733 //send the last unconfigure
734 cam_stream_size_info_t stream_config_info;
735 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
736 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
737 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800738 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -0700739 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700740 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700741 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
742 stream_config_info);
743 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
744 if (rc < 0) {
745 LOGE("set_parms failed for unconfigure");
746 }
747 }
748 deinitParameters();
749 }
750
751 if (mChannelHandle) {
752 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
753 mChannelHandle);
754 LOGH("deleting channel %d", mChannelHandle);
755 mChannelHandle = 0;
756 }
757
758 if (mState != CLOSED)
759 closeCamera();
760
761 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
762 req.mPendingBufferList.clear();
763 }
764 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700765 for (pendingRequestIterator i = mPendingRequestsList.begin();
766 i != mPendingRequestsList.end();) {
767 i = erasePendingRequest(i);
768 }
769 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
770 if (mDefaultMetadata[i])
771 free_camera_metadata(mDefaultMetadata[i]);
772
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800773 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700774
775 pthread_cond_destroy(&mRequestCond);
776
777 pthread_cond_destroy(&mBuffersCond);
778
779 pthread_mutex_destroy(&mMutex);
780 LOGD("X");
781}
782
/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream dimensions are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec, depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that the ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that. We also have logic to override to full array
                 * size, so keep the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001335/*===========================================================================
1336 * FUNCTION : validateUsageFlags
1337 *
1338 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1339 *
1340 * PARAMETERS :
1341 * @stream_list : streams to be configured
1342 *
1343 * RETURN :
1344 * NO_ERROR if the usage flags are supported
1345 * error code if usage flags are not supported
1346 *
1347 *==========================================================================*/
1348int QCamera3HardwareInterface::validateUsageFlags(
1349 const camera3_stream_configuration_t* streamList)
1350{
1351 for (size_t j = 0; j < streamList->num_streams; j++) {
1352 const camera3_stream_t *newStream = streamList->streams[j];
1353
1354 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1355 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1356 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1357 continue;
1358 }
1359
Jason Leec4cf5032017-05-24 18:31:41 -07001360 // Here we only care whether it's EIS3 or not
1361 char is_type_value[PROPERTY_VALUE_MAX];
1362 property_get("persist.camera.is_type", is_type_value, "4");
1363 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1364 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1365 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1366 isType = IS_TYPE_NONE;
1367
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1369 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1370 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1371 bool forcePreviewUBWC = true;
1372 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1373 forcePreviewUBWC = false;
1374 }
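        // Query the default internal format each stream type would resolve to,
        // so that combined usage flags (e.g. preview + video on one stream) can
        // be rejected when the underlying formats would differ.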
1375 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001376 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001377 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001378 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001379 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001380 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001381
1382 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1383 // So color spaces will always match.
1384
1385 // Check whether underlying formats of shared streams match.
1386 if (isVideo && isPreview && videoFormat != previewFormat) {
1387 LOGE("Combined video and preview usage flag is not supported");
1388 return -EINVAL;
1389 }
1390 if (isPreview && isZSL && previewFormat != zslFormat) {
1391 LOGE("Combined preview and zsl usage flag is not supported");
1392 return -EINVAL;
1393 }
1394 if (isVideo && isZSL && videoFormat != zslFormat) {
1395 LOGE("Combined video and zsl usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 }
1399 return NO_ERROR;
1400}
1401
1402/*===========================================================================
1403 * FUNCTION : validateUsageFlagsForEis
1404 *
1405 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1406 *
1407 * PARAMETERS :
1408 * @stream_list : streams to be configured
1409 *
1410 * RETURN :
1411 * NO_ERROR if the usage flags are supported
1412 * error code if usage flags are not supported
1413 *
1414 *==========================================================================*/
1415int QCamera3HardwareInterface::validateUsageFlagsForEis(
1416 const camera3_stream_configuration_t* streamList)
1417{
1418 for (size_t j = 0; j < streamList->num_streams; j++) {
1419 const camera3_stream_t *newStream = streamList->streams[j];
1420
1421 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1422 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1423
1424         // Because EIS is "hard-coded" for certain use cases, and the current
1425         // implementation doesn't support sharing preview and video on the same
1426         // stream, return failure if EIS is forced on.
1427 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1428 LOGE("Combined video and preview usage flag is not supported due to EIS");
1429 return -EINVAL;
1430 }
1431 }
1432 return NO_ERROR;
1433}
1434
Thierry Strudel3d639192016-09-09 11:52:26 -07001435/*==============================================================================
1436 * FUNCTION : isSupportChannelNeeded
1437 *
1438 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1439 *
1440 * PARAMETERS :
1441 * @stream_list : streams to be configured
1442 * @stream_config_info : the config info for streams to be configured
1443 *
1444 * RETURN : Boolean true/false decision
1445 *
1446 *==========================================================================*/
1447bool QCamera3HardwareInterface::isSupportChannelNeeded(
1448 camera3_stream_configuration_t *streamList,
1449 cam_stream_size_info_t stream_config_info)
1450{
1451 uint32_t i;
1452 bool pprocRequested = false;
1453 /* Check for conditions where PProc pipeline does not have any streams*/
1454 for (i = 0; i < stream_config_info.num_streams; i++) {
1455 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1456 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1457 pprocRequested = true;
1458 break;
1459 }
1460 }
1461
1462 if (pprocRequested == false )
1463 return true;
1464
1465 /* Dummy stream needed if only raw or jpeg streams present */
1466 for (i = 0; i < streamList->num_streams; i++) {
1467 switch(streamList->streams[i]->format) {
1468 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1469 case HAL_PIXEL_FORMAT_RAW10:
1470 case HAL_PIXEL_FORMAT_RAW16:
1471 case HAL_PIXEL_FORMAT_BLOB:
1472 break;
1473 default:
1474 return false;
1475 }
1476 }
1477 return true;
1478}
1479
1480/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001483 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001484 *
1485 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 *
1488 * RETURN : int32_t type of status
1489 * NO_ERROR -- success
1490 * non-zero failure code
1491 *
1492 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001493int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001494{
1495 int32_t rc = NO_ERROR;
1496
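    // Find the largest requested width and height across all configured streams;
    // this is sent to the backend as CAM_INTF_PARM_MAX_DIMENSION before querying
    // the resulting sensor mode.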
1497 cam_dimension_t max_dim = {0, 0};
1498 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1499 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1500 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1501 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1502 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1503 }
1504
1505 clear_metadata_buffer(mParameters);
1506
1507 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1508 max_dim);
1509 if (rc != NO_ERROR) {
1510 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1511 return rc;
1512 }
1513
1514 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1515 if (rc != NO_ERROR) {
1516 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1517 return rc;
1518 }
1519
1520 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001521 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001522
1523 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1524 mParameters);
1525 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001526 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 return rc;
1528 }
1529
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001530 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001531 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1532 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1533 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1534 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1535 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001536
1537 return rc;
1538}
1539
1540/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001541 * FUNCTION : getCurrentSensorModeInfo
1542 *
1543 * DESCRIPTION: Get sensor mode information that is currently selected.
1544 *
1545 * PARAMETERS :
1546 * @sensorModeInfo : sensor mode information (output)
1547 *
1548 * RETURN : int32_t type of status
1549 * NO_ERROR -- success
1550 * non-zero failure code
1551 *
1552 *==========================================================================*/
1553int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1554{
1555 int32_t rc = NO_ERROR;
1556
1557 clear_metadata_buffer(mParameters);
1558 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1559
1560 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1561 mParameters);
1562 if (rc != NO_ERROR) {
1563 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
1564 return rc;
1565 }
1566
1567 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1568 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1569 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1570 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1571 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1572 sensorModeInfo.num_raw_bits);
1573
1574 return rc;
1575}
1576
1577/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001578 * FUNCTION : addToPPFeatureMask
1579 *
1580 * DESCRIPTION: add additional features to pp feature mask based on
1581 * stream type and usecase
1582 *
1583 * PARAMETERS :
1584 * @stream_format : stream type for feature mask
1585 * @stream_idx : stream idx within postprocess_mask list to change
1586 *
1587 * RETURN : None
1588 *
1589 *==========================================================================*/
1590void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1591 uint32_t stream_idx)
1592{
1593 char feature_mask_value[PROPERTY_VALUE_MAX];
1594 cam_feature_mask_t feature_mask;
1595 int args_converted;
1596 int property_len;
1597
1598 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001599#ifdef _LE_CAMERA_
1600 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1601 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1602 property_len = property_get("persist.camera.hal3.feature",
1603 feature_mask_value, swtnr_feature_mask_value);
1604#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001605 property_len = property_get("persist.camera.hal3.feature",
1606 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#endif
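    // The property value may be given in hex ("0x...") or decimal. Illustrative
    // usage (mask value is a placeholder, not a specific feature bit):
    //   adb shell setprop persist.camera.hal3.feature <hex or decimal mask>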
Thierry Strudel3d639192016-09-09 11:52:26 -07001608 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1609 (feature_mask_value[1] == 'x')) {
1610 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1611 } else {
1612 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1613 }
1614 if (1 != args_converted) {
1615 feature_mask = 0;
1616 LOGE("Wrong feature mask %s", feature_mask_value);
1617 return;
1618 }
1619
1620 switch (stream_format) {
1621 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1622         /* Add SW TNR or LLVD SeeMore to pp feature mask only if video hint is enabled */
1623 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1624 mStreamConfigInfo.postprocess_mask[stream_idx]
1625 |= CAM_QTI_FEATURE_SW_TNR;
1626 LOGH("Added SW TNR to pp feature mask");
1627 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1628 mStreamConfigInfo.postprocess_mask[stream_idx]
1629 |= CAM_QCOM_FEATURE_LLVD;
1630 LOGH("Added LLVD SeeMore to pp feature mask");
1631 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001632 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1633 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1634 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1635 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001636 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1637 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1638 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1639 CAM_QTI_FEATURE_BINNING_CORRECTION;
1640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001641 break;
1642 }
1643 default:
1644 break;
1645 }
1646 LOGD("PP feature mask %llx",
1647 mStreamConfigInfo.postprocess_mask[stream_idx]);
1648}
1649
1650/*==============================================================================
1651 * FUNCTION : updateFpsInPreviewBuffer
1652 *
1653 * DESCRIPTION: update FPS information in preview buffer.
1654 *
1655 * PARAMETERS :
1656 * @metadata : pointer to metadata buffer
1657 * @frame_number: frame_number to look for in pending buffer list
1658 *
1659 * RETURN : None
1660 *
1661 *==========================================================================*/
1662void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1663 uint32_t frame_number)
1664{
1665 // Mark all pending buffers for this particular request
1666 // with corresponding framerate information
1667 for (List<PendingBuffersInRequest>::iterator req =
1668 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1669 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1670 for(List<PendingBufferInfo>::iterator j =
1671 req->mPendingBufferList.begin();
1672 j != req->mPendingBufferList.end(); j++) {
1673 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1674 if ((req->frame_number == frame_number) &&
1675 (channel->getStreamTypeMask() &
1676 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1677 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1678 CAM_INTF_PARM_FPS_RANGE, metadata) {
1679 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1680 struct private_handle_t *priv_handle =
1681 (struct private_handle_t *)(*(j->buffer));
1682 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1683 }
1684 }
1685 }
1686 }
1687}
1688
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001689/*==============================================================================
1690 * FUNCTION : updateTimeStampInPendingBuffers
1691 *
1692 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1693 * of a frame number
1694 *
1695 * PARAMETERS :
1696 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1697 * @timestamp : timestamp to be set
1698 *
1699 * RETURN : None
1700 *
1701 *==========================================================================*/
1702void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1703 uint32_t frameNumber, nsecs_t timestamp)
1704{
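    // Stamp the display (gralloc) metadata of every pending buffer belonging to
    // this frame number with the given timestamp (SET_VT_TIMESTAMP).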
1705 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1706 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1707 if (req->frame_number != frameNumber)
1708 continue;
1709
1710 for (auto k = req->mPendingBufferList.begin();
1711 k != req->mPendingBufferList.end(); k++ ) {
1712 struct private_handle_t *priv_handle =
1713 (struct private_handle_t *) (*(k->buffer));
1714 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1715 }
1716 }
1717 return;
1718}
1719
Thierry Strudel3d639192016-09-09 11:52:26 -07001720/*===========================================================================
1721 * FUNCTION : configureStreams
1722 *
1723 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1724 * and output streams.
1725 *
1726 * PARAMETERS :
1727 * @stream_list : streams to be configured
1728 *
1729 * RETURN : int32_t type of status
1730 *
1731 *==========================================================================*/
1732int QCamera3HardwareInterface::configureStreams(
1733 camera3_stream_configuration_t *streamList)
1734{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001735 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001736 int rc = 0;
1737
1738 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001739 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001740 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001741 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001742
1743 return rc;
1744}
1745
1746/*===========================================================================
1747 * FUNCTION : configureStreamsPerfLocked
1748 *
1749 * DESCRIPTION: configureStreams while perfLock is held.
1750 *
1751 * PARAMETERS :
1752 * @stream_list : streams to be configured
1753 *
1754 * RETURN : int32_t type of status
1755 * NO_ERROR -- success
1756 * non-zero failure code
1757 *==========================================================================*/
1758int QCamera3HardwareInterface::configureStreamsPerfLocked(
1759 camera3_stream_configuration_t *streamList)
1760{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001761 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001762 int rc = 0;
1763
1764 // Sanity check stream_list
1765 if (streamList == NULL) {
1766 LOGE("NULL stream configuration");
1767 return BAD_VALUE;
1768 }
1769 if (streamList->streams == NULL) {
1770 LOGE("NULL stream list");
1771 return BAD_VALUE;
1772 }
1773
1774 if (streamList->num_streams < 1) {
1775 LOGE("Bad number of streams requested: %d",
1776 streamList->num_streams);
1777 return BAD_VALUE;
1778 }
1779
1780 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1781 LOGE("Maximum number of streams %d exceeded: %d",
1782 MAX_NUM_STREAMS, streamList->num_streams);
1783 return BAD_VALUE;
1784 }
1785
Jason Leec4cf5032017-05-24 18:31:41 -07001786 mOpMode = streamList->operation_mode;
1787 LOGD("mOpMode: %d", mOpMode);
1788
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001789 rc = validateUsageFlags(streamList);
1790 if (rc != NO_ERROR) {
1791 return rc;
1792 }
1793
Thierry Strudel3d639192016-09-09 11:52:26 -07001794    /* first invalidate all the streams in mStreamInfo;
1795 * if they appear again, they will be validated */
1796 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1797 it != mStreamInfo.end(); it++) {
1798 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1799 if (channel) {
1800 channel->stop();
1801 }
1802 (*it)->status = INVALID;
1803 }
1804
1805 if (mRawDumpChannel) {
1806 mRawDumpChannel->stop();
1807 delete mRawDumpChannel;
1808 mRawDumpChannel = NULL;
1809 }
1810
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001811 if (mHdrPlusRawSrcChannel) {
1812 mHdrPlusRawSrcChannel->stop();
1813 delete mHdrPlusRawSrcChannel;
1814 mHdrPlusRawSrcChannel = NULL;
1815 }
1816
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 if (mSupportChannel)
1818 mSupportChannel->stop();
1819
1820 if (mAnalysisChannel) {
1821 mAnalysisChannel->stop();
1822 }
1823 if (mMetadataChannel) {
1824         /* If mStreamInfo is not empty, there is a metadata stream */
1825 mMetadataChannel->stop();
1826 }
1827 if (mChannelHandle) {
1828 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001829 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 LOGD("stopping channel %d", mChannelHandle);
1831 }
1832
1833 pthread_mutex_lock(&mMutex);
1834
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001835 mPictureChannel = NULL;
1836
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 // Check state
1838 switch (mState) {
1839 case INITIALIZED:
1840 case CONFIGURED:
1841 case STARTED:
1842 /* valid state */
1843 break;
1844 default:
1845 LOGE("Invalid state %d", mState);
1846 pthread_mutex_unlock(&mMutex);
1847 return -ENODEV;
1848 }
1849
1850 /* Check whether we have video stream */
1851 m_bIs4KVideo = false;
1852 m_bIsVideo = false;
1853 m_bEisSupportedSize = false;
1854 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001855 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001856 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001857 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001858 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001859 uint32_t videoWidth = 0U;
1860 uint32_t videoHeight = 0U;
1861 size_t rawStreamCnt = 0;
1862 size_t stallStreamCnt = 0;
1863 size_t processedStreamCnt = 0;
1864 // Number of streams on ISP encoder path
1865 size_t numStreamsOnEncoder = 0;
1866 size_t numYuv888OnEncoder = 0;
1867 bool bYuv888OverrideJpeg = false;
1868 cam_dimension_t largeYuv888Size = {0, 0};
1869 cam_dimension_t maxViewfinderSize = {0, 0};
1870 bool bJpegExceeds4K = false;
1871 bool bJpegOnEncoder = false;
1872 bool bUseCommonFeatureMask = false;
1873 cam_feature_mask_t commonFeatureMask = 0;
1874 bool bSmallJpegSize = false;
1875 uint32_t width_ratio;
1876 uint32_t height_ratio;
1877 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1878 camera3_stream_t *inputStream = NULL;
1879 bool isJpeg = false;
1880 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001881 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001882 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001883
1884 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1885
1886 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 uint8_t eis_prop_set;
1888 uint32_t maxEisWidth = 0;
1889 uint32_t maxEisHeight = 0;
1890
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001891 // Initialize all instant AEC related variables
1892 mInstantAEC = false;
1893 mResetInstantAEC = false;
1894 mInstantAECSettledFrameNumber = 0;
1895 mAecSkipDisplayFrameBound = 0;
1896 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001897 mCurrFeatureState = 0;
1898 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001899
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1901
1902 size_t count = IS_TYPE_MAX;
1903 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1904 for (size_t i = 0; i < count; i++) {
1905 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001906 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1907 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 break;
1909 }
1910 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001911
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001912 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001913 maxEisWidth = MAX_EIS_WIDTH;
1914 maxEisHeight = MAX_EIS_HEIGHT;
1915 }
1916
1917 /* EIS setprop control */
1918 char eis_prop[PROPERTY_VALUE_MAX];
1919 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001920 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001921 eis_prop_set = (uint8_t)atoi(eis_prop);
1922
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001923 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001924 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1925
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001926 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1927 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001928
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 /* stream configurations */
1930 for (size_t i = 0; i < streamList->num_streams; i++) {
1931 camera3_stream_t *newStream = streamList->streams[i];
1932 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1933 "height = %d, rotation = %d, usage = 0x%x",
1934 i, newStream->stream_type, newStream->format,
1935 newStream->width, newStream->height, newStream->rotation,
1936 newStream->usage);
1937 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1938 newStream->stream_type == CAMERA3_STREAM_INPUT){
1939 isZsl = true;
1940 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001941 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1942 IS_USAGE_PREVIEW(newStream->usage)) {
1943 isPreview = true;
1944 }
1945
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1947 inputStream = newStream;
1948 }
1949
Emilian Peev7650c122017-01-19 08:24:33 -08001950 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1951 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001952 isJpeg = true;
1953 jpegSize.width = newStream->width;
1954 jpegSize.height = newStream->height;
1955 if (newStream->width > VIDEO_4K_WIDTH ||
1956 newStream->height > VIDEO_4K_HEIGHT)
1957 bJpegExceeds4K = true;
1958 }
1959
1960 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1961 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1962 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001963             // In HAL3 we can have multiple different video streams.
1964             // The variables videoWidth and videoHeight below hold the
1965             // dimensions of the largest of them.
1966 if (videoWidth < newStream->width ||
1967 videoHeight < newStream->height) {
1968 videoWidth = newStream->width;
1969 videoHeight = newStream->height;
1970 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1972 (VIDEO_4K_HEIGHT <= newStream->height)) {
1973 m_bIs4KVideo = true;
1974 }
1975 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1976 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001977
Thierry Strudel3d639192016-09-09 11:52:26 -07001978 }
1979 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1980 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1981 switch (newStream->format) {
1982 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001983 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1984 depthPresent = true;
1985 break;
1986 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001987 stallStreamCnt++;
1988 if (isOnEncoder(maxViewfinderSize, newStream->width,
1989 newStream->height)) {
1990 numStreamsOnEncoder++;
1991 bJpegOnEncoder = true;
1992 }
1993 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1994 newStream->width);
1995 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1996                     newStream->height);
1997 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1998 "FATAL: max_downscale_factor cannot be zero and so assert");
1999 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2000 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2001 LOGH("Setting small jpeg size flag to true");
2002 bSmallJpegSize = true;
2003 }
2004 break;
2005 case HAL_PIXEL_FORMAT_RAW10:
2006 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2007 case HAL_PIXEL_FORMAT_RAW16:
2008 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002009 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2010 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2011 pdStatCount++;
2012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002013 break;
2014 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2015 processedStreamCnt++;
2016 if (isOnEncoder(maxViewfinderSize, newStream->width,
2017 newStream->height)) {
2018 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2019 !IS_USAGE_ZSL(newStream->usage)) {
2020 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2021 }
2022 numStreamsOnEncoder++;
2023 }
2024 break;
2025 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2026 processedStreamCnt++;
2027 if (isOnEncoder(maxViewfinderSize, newStream->width,
2028 newStream->height)) {
2029 // If Yuv888 size is not greater than 4K, set feature mask
2030 // to SUPERSET so that it support concurrent request on
2031 // YUV and JPEG.
2032 if (newStream->width <= VIDEO_4K_WIDTH &&
2033 newStream->height <= VIDEO_4K_HEIGHT) {
2034 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2035 }
2036 numStreamsOnEncoder++;
2037 numYuv888OnEncoder++;
2038 largeYuv888Size.width = newStream->width;
2039 largeYuv888Size.height = newStream->height;
2040 }
2041 break;
2042 default:
2043 processedStreamCnt++;
2044 if (isOnEncoder(maxViewfinderSize, newStream->width,
2045 newStream->height)) {
2046 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 }
2051
2052 }
2053 }
2054
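    // Disable EIS for front cameras and for configurations without a video stream.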
2055 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2056 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2057 !m_bIsVideo) {
2058 m_bEisEnable = false;
2059 }
2060
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002061 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2062 pthread_mutex_unlock(&mMutex);
2063 return -EINVAL;
2064 }
2065
Thierry Strudel54dc9782017-02-15 12:12:10 -08002066 uint8_t forceEnableTnr = 0;
2067 char tnr_prop[PROPERTY_VALUE_MAX];
2068 memset(tnr_prop, 0, sizeof(tnr_prop));
2069 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2070 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2071
Thierry Strudel3d639192016-09-09 11:52:26 -07002072 /* Logic to enable/disable TNR based on specific config size/etc.*/
2073 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002074 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2075 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002076 else if (forceEnableTnr)
2077 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002078
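    // Video HDR is opted in via the persist.camera.hdr.video property and is only
    // honored for video sessions outside constrained high speed (HFR) mode.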
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002079 char videoHdrProp[PROPERTY_VALUE_MAX];
2080 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2081 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2082 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2083
2084 if (hdr_mode_prop == 1 && m_bIsVideo &&
2085 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2086 m_bVideoHdrEnabled = true;
2087 else
2088 m_bVideoHdrEnabled = false;
2089
2090
Thierry Strudel3d639192016-09-09 11:52:26 -07002091 /* Check if num_streams is sane */
2092 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2093 rawStreamCnt > MAX_RAW_STREAMS ||
2094 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2095 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2096 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2097 pthread_mutex_unlock(&mMutex);
2098 return -EINVAL;
2099 }
2100 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002101 if (isZsl && m_bIs4KVideo) {
2102 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 pthread_mutex_unlock(&mMutex);
2104 return -EINVAL;
2105 }
2106 /* Check if stream sizes are sane */
2107 if (numStreamsOnEncoder > 2) {
2108 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2109 pthread_mutex_unlock(&mMutex);
2110 return -EINVAL;
2111 } else if (1 < numStreamsOnEncoder){
2112 bUseCommonFeatureMask = true;
2113 LOGH("Multiple streams above max viewfinder size, common mask needed");
2114 }
2115
2116 /* Check if BLOB size is greater than 4k in 4k recording case */
2117 if (m_bIs4KVideo && bJpegExceeds4K) {
2118 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2119 pthread_mutex_unlock(&mMutex);
2120 return -EINVAL;
2121 }
2122
Emilian Peev7650c122017-01-19 08:24:33 -08002123 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2124 depthPresent) {
2125 LOGE("HAL doesn't support depth streams in HFR mode!");
2126 pthread_mutex_unlock(&mMutex);
2127 return -EINVAL;
2128 }
2129
Thierry Strudel3d639192016-09-09 11:52:26 -07002130 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2131 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2132 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2133 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2134 // configurations:
2135 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2136 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2137 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2138 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2139 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2140 __func__);
2141 pthread_mutex_unlock(&mMutex);
2142 return -EINVAL;
2143 }
2144
2145 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2146     // the YUV stream's size is greater than the JPEG size, set common
2147 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2148 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2149 jpegSize.width, jpegSize.height) &&
2150 largeYuv888Size.width > jpegSize.width &&
2151 largeYuv888Size.height > jpegSize.height) {
2152 bYuv888OverrideJpeg = true;
2153 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2154 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2155 }
2156
2157 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2158 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2159 commonFeatureMask);
2160 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2161 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2162
2163 rc = validateStreamDimensions(streamList);
2164 if (rc == NO_ERROR) {
2165 rc = validateStreamRotations(streamList);
2166 }
2167 if (rc != NO_ERROR) {
2168 LOGE("Invalid stream configuration requested!");
2169 pthread_mutex_unlock(&mMutex);
2170 return rc;
2171 }
2172
Emilian Peev0f3c3162017-03-15 12:57:46 +00002173 if (1 < pdStatCount) {
2174 LOGE("HAL doesn't support multiple PD streams");
2175 pthread_mutex_unlock(&mMutex);
2176 return -EINVAL;
2177 }
2178
2179 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2180 (1 == pdStatCount)) {
2181 LOGE("HAL doesn't support PD streams in HFR mode!");
2182 pthread_mutex_unlock(&mMutex);
2183 return -EINVAL;
2184 }
2185
Thierry Strudel3d639192016-09-09 11:52:26 -07002186 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2187 for (size_t i = 0; i < streamList->num_streams; i++) {
2188 camera3_stream_t *newStream = streamList->streams[i];
2189 LOGH("newStream type = %d, stream format = %d "
2190 "stream size : %d x %d, stream rotation = %d",
2191 newStream->stream_type, newStream->format,
2192 newStream->width, newStream->height, newStream->rotation);
2193         //if the stream is already in mStreamInfo, validate it
2194 bool stream_exists = false;
2195 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2196 it != mStreamInfo.end(); it++) {
2197 if ((*it)->stream == newStream) {
2198 QCamera3ProcessingChannel *channel =
2199 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2200 stream_exists = true;
2201 if (channel)
2202 delete channel;
2203 (*it)->status = VALID;
2204 (*it)->stream->priv = NULL;
2205 (*it)->channel = NULL;
2206 }
2207 }
2208 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2209 //new stream
2210 stream_info_t* stream_info;
2211 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2212 if (!stream_info) {
2213 LOGE("Could not allocate stream info");
2214 rc = -ENOMEM;
2215 pthread_mutex_unlock(&mMutex);
2216 return rc;
2217 }
2218 stream_info->stream = newStream;
2219 stream_info->status = VALID;
2220 stream_info->channel = NULL;
2221 mStreamInfo.push_back(stream_info);
2222 }
2223 /* Covers Opaque ZSL and API1 F/W ZSL */
2224 if (IS_USAGE_ZSL(newStream->usage)
2225 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2226 if (zslStream != NULL) {
2227 LOGE("Multiple input/reprocess streams requested!");
2228 pthread_mutex_unlock(&mMutex);
2229 return BAD_VALUE;
2230 }
2231 zslStream = newStream;
2232 }
2233 /* Covers YUV reprocess */
2234 if (inputStream != NULL) {
2235 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2236 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2237 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2238 && inputStream->width == newStream->width
2239 && inputStream->height == newStream->height) {
2240 if (zslStream != NULL) {
2241                     /* This scenario indicates that multiple YUV streams with the same
2242                      * size as the input stream have been requested. Since the zsl stream
2243                      * handle is solely used to override the size of streams that share
2244                      * h/w streams, we just make a guess here as to which of the streams
2245                      * is the ZSL stream; this will be refactored once we have generic
2246                      * logic for streams sharing encoder output.
2247 */
2248 LOGH("Warning, Multiple ip/reprocess streams requested!");
2249 }
2250 zslStream = newStream;
2251 }
2252 }
2253 }
2254
2255 /* If a zsl stream is set, we know that we have configured at least one input or
2256 bidirectional stream */
2257 if (NULL != zslStream) {
2258 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2259 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2260 mInputStreamInfo.format = zslStream->format;
2261 mInputStreamInfo.usage = zslStream->usage;
2262 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2263 mInputStreamInfo.dim.width,
2264 mInputStreamInfo.dim.height,
2265 mInputStreamInfo.format, mInputStreamInfo.usage);
2266 }
2267
2268 cleanAndSortStreamInfo();
2269 if (mMetadataChannel) {
2270 delete mMetadataChannel;
2271 mMetadataChannel = NULL;
2272 }
2273 if (mSupportChannel) {
2274 delete mSupportChannel;
2275 mSupportChannel = NULL;
2276 }
2277
2278 if (mAnalysisChannel) {
2279 delete mAnalysisChannel;
2280 mAnalysisChannel = NULL;
2281 }
2282
2283 if (mDummyBatchChannel) {
2284 delete mDummyBatchChannel;
2285 mDummyBatchChannel = NULL;
2286 }
2287
Emilian Peev7650c122017-01-19 08:24:33 -08002288 if (mDepthChannel) {
2289 mDepthChannel = NULL;
2290 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002291 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002292
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002293 mShutterDispatcher.clear();
2294 mOutputBufferDispatcher.clear();
2295
Thierry Strudel2896d122017-02-23 19:18:03 -08002296 char is_type_value[PROPERTY_VALUE_MAX];
2297 property_get("persist.camera.is_type", is_type_value, "4");
2298 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2299
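    // persist.camera.gzoom.at is a bitmask: bit 0 enables CAM_QCOM_FEATURE_GOOG_ZOOM
    // on the video stream, bit 1 on the preview stream (back camera only), while
    // persist.camera.gzoom.4k additionally allows it for 4K video.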
Binhao Line406f062017-05-03 14:39:44 -07002300 char property_value[PROPERTY_VALUE_MAX];
2301 property_get("persist.camera.gzoom.at", property_value, "0");
2302 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002303 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2304 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2305 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2306 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002307
2308 property_get("persist.camera.gzoom.4k", property_value, "0");
2309 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2310
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 //Create metadata channel and initialize it
2312 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2313 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2314 gCamCapability[mCameraId]->color_arrangement);
2315 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2316 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002317 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002318 if (mMetadataChannel == NULL) {
2319 LOGE("failed to allocate metadata channel");
2320 rc = -ENOMEM;
2321 pthread_mutex_unlock(&mMutex);
2322 return rc;
2323 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002324 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002325 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2326 if (rc < 0) {
2327 LOGE("metadata channel initialization failed");
2328 delete mMetadataChannel;
2329 mMetadataChannel = NULL;
2330 pthread_mutex_unlock(&mMutex);
2331 return rc;
2332 }
2333
Thierry Strudel2896d122017-02-23 19:18:03 -08002334 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002335 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002336 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002337 // Keep track of preview/video streams indices.
2338 // There could be more than one preview streams, but only one video stream.
2339 int32_t video_stream_idx = -1;
2340 int32_t preview_stream_idx[streamList->num_streams];
2341 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002342 bool previewTnr[streamList->num_streams];
2343 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2344 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2345 // Loop through once to determine preview TNR conditions before creating channels.
2346 for (size_t i = 0; i < streamList->num_streams; i++) {
2347 camera3_stream_t *newStream = streamList->streams[i];
2348 uint32_t stream_usage = newStream->usage;
2349 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2350 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2351 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2352 video_stream_idx = (int32_t)i;
2353 else
2354 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2355 }
2356 }
2357 // By default, preview stream TNR is disabled.
2358 // Enable TNR to the preview stream if all conditions below are satisfied:
2359 // 1. preview resolution == video resolution.
2360 // 2. video stream TNR is enabled.
2361 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2362 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2363 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2364 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2365 if (m_bTnrEnabled && m_bTnrVideo &&
2366 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2367 video_stream->width == preview_stream->width &&
2368 video_stream->height == preview_stream->height) {
2369 previewTnr[preview_stream_idx[i]] = true;
2370 }
2371 }
2372
Thierry Strudel3d639192016-09-09 11:52:26 -07002373 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2374 /* Allocate channel objects for the requested streams */
2375 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002376
Thierry Strudel3d639192016-09-09 11:52:26 -07002377 camera3_stream_t *newStream = streamList->streams[i];
2378 uint32_t stream_usage = newStream->usage;
2379 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2380 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2381 struct camera_info *p_info = NULL;
2382 pthread_mutex_lock(&gCamLock);
2383 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2384 pthread_mutex_unlock(&gCamLock);
2385 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2386 || IS_USAGE_ZSL(newStream->usage)) &&
2387 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002390 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2391 if (bUseCommonFeatureMask)
2392 zsl_ppmask = commonFeatureMask;
2393 else
2394 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002395 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002396 if (numStreamsOnEncoder > 0)
2397 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2398 else
2399 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002403 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002404 LOGH("Input stream configured, reprocess config");
2405 } else {
2406 //for non zsl streams find out the format
2407 switch (newStream->format) {
2408 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2409 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002410 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2412 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2413 /* add additional features to pp feature mask */
2414 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2415 mStreamConfigInfo.num_streams);
2416
2417 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2418 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2419 CAM_STREAM_TYPE_VIDEO;
2420 if (m_bTnrEnabled && m_bTnrVideo) {
2421 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2422 CAM_QCOM_FEATURE_CPP_TNR;
2423 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2424 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2425 ~CAM_QCOM_FEATURE_CDS;
2426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2429 CAM_QTI_FEATURE_PPEISCORE;
2430 }
Binhao Line406f062017-05-03 14:39:44 -07002431 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2432 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2433 CAM_QCOM_FEATURE_GOOG_ZOOM;
2434 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002435 } else {
2436 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2437 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002438 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002439 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2440 CAM_QCOM_FEATURE_CPP_TNR;
2441 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2443 ~CAM_QCOM_FEATURE_CDS;
2444 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002445 if(!m_bSwTnrPreview) {
2446 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2447 ~CAM_QTI_FEATURE_SW_TNR;
2448 }
Binhao Line406f062017-05-03 14:39:44 -07002449 if (is_goog_zoom_preview_enabled) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2451 CAM_QCOM_FEATURE_GOOG_ZOOM;
2452 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 padding_info.width_padding = mSurfaceStridePadding;
2454 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002455 previewSize.width = (int32_t)newStream->width;
2456 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 }
2458 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2459 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2460 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2461 newStream->height;
2462 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2463 newStream->width;
2464 }
2465 }
2466 break;
2467 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002468 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2470 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2471 if (bUseCommonFeatureMask)
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2473 commonFeatureMask;
2474 else
2475 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2476 CAM_QCOM_FEATURE_NONE;
2477 } else {
2478 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2479 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2480 }
2481 break;
2482 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002483 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002484 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2485 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2486 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2487 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2488 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002489 /* Remove rotation if it is not supported
2490 for 4K LiveVideo snapshot case (online processing) */
2491 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2492 CAM_QCOM_FEATURE_ROTATION)) {
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2494 &= ~CAM_QCOM_FEATURE_ROTATION;
2495 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002496 } else {
2497 if (bUseCommonFeatureMask &&
2498 isOnEncoder(maxViewfinderSize, newStream->width,
2499 newStream->height)) {
2500 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2501 } else {
2502 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2503 }
2504 }
2505 if (isZsl) {
2506 if (zslStream) {
2507 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2508 (int32_t)zslStream->width;
2509 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2510 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002511 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2512 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002513 } else {
2514 LOGE("Error, No ZSL stream identified");
2515 pthread_mutex_unlock(&mMutex);
2516 return -EINVAL;
2517 }
2518 } else if (m_bIs4KVideo) {
2519 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2520 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2521 } else if (bYuv888OverrideJpeg) {
2522 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2523 (int32_t)largeYuv888Size.width;
2524 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2525 (int32_t)largeYuv888Size.height;
2526 }
2527 break;
2528 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2529 case HAL_PIXEL_FORMAT_RAW16:
2530 case HAL_PIXEL_FORMAT_RAW10:
2531 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2532 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2533 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002534 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2535 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2536 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2537 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2538 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2539 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2540 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2541 gCamCapability[mCameraId]->dt[mPDIndex];
2542 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2543 gCamCapability[mCameraId]->vc[mPDIndex];
2544 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002545 break;
2546 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002547 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002548 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2549 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2550 break;
2551 }
2552 }
2553
2554 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2555 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2556 gCamCapability[mCameraId]->color_arrangement);
2557
2558 if (newStream->priv == NULL) {
2559 //New stream, construct channel
2560 switch (newStream->stream_type) {
2561 case CAMERA3_STREAM_INPUT:
2562 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2563 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2564 break;
2565 case CAMERA3_STREAM_BIDIRECTIONAL:
2566 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2567 GRALLOC_USAGE_HW_CAMERA_WRITE;
2568 break;
2569 case CAMERA3_STREAM_OUTPUT:
2570 /* For video encoding stream, set read/write rarely
2571 * flag so that they may be set to un-cached */
2572 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2573 newStream->usage |=
2574 (GRALLOC_USAGE_SW_READ_RARELY |
2575 GRALLOC_USAGE_SW_WRITE_RARELY |
2576 GRALLOC_USAGE_HW_CAMERA_WRITE);
2577 else if (IS_USAGE_ZSL(newStream->usage))
2578 {
2579 LOGD("ZSL usage flag skipping");
2580 }
2581 else if (newStream == zslStream
2582 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2583 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2584 } else
2585 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2586 break;
2587 default:
2588 LOGE("Invalid stream_type %d", newStream->stream_type);
2589 break;
2590 }
2591
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002592 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2594 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2595 QCamera3ProcessingChannel *channel = NULL;
2596 switch (newStream->format) {
2597 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2598 if ((newStream->usage &
2599 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2600 (streamList->operation_mode ==
2601 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2602 ) {
2603 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2604 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002605 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 this,
2607 newStream,
2608 (cam_stream_type_t)
2609 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2610 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2611 mMetadataChannel,
2612 0); //heap buffers are not required for HFR video channel
2613 if (channel == NULL) {
2614 LOGE("allocation of channel failed");
2615 pthread_mutex_unlock(&mMutex);
2616 return -ENOMEM;
2617 }
2618 //channel->getNumBuffers() will return 0 here so use
2619 //MAX_INFLIGHT_HFR_REQUESTS
2620 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2621 newStream->priv = channel;
2622 LOGI("num video buffers in HFR mode: %d",
2623 MAX_INFLIGHT_HFR_REQUESTS);
2624 } else {
2625 /* Copy stream contents in HFR preview only case to create
2626 * dummy batch channel so that sensor streaming is in
2627 * HFR mode */
2628 if (!m_bIsVideo && (streamList->operation_mode ==
2629 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2630 mDummyBatchStream = *newStream;
2631 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002632 int bufferCount = MAX_INFLIGHT_REQUESTS;
2633 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2634 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002635 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2636 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2637 bufferCount = m_bIs4KVideo ?
2638 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2639 }
2640
Thierry Strudel2896d122017-02-23 19:18:03 -08002641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002642 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2643 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002644 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002645 this,
2646 newStream,
2647 (cam_stream_type_t)
2648 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2649 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2650 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002651 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002652 if (channel == NULL) {
2653 LOGE("allocation of channel failed");
2654 pthread_mutex_unlock(&mMutex);
2655 return -ENOMEM;
2656 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002657 /* disable UBWC for preview, though supported,
2658 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002659 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002660 (previewSize.width == (int32_t)videoWidth)&&
2661 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002662 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002663 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002665 /* When goog_zoom is linked to the preview or video stream,
2666 * disable UBWC for the linked stream */
2667 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2668 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2669 channel->setUBWCEnabled(false);
2670 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 newStream->max_buffers = channel->getNumBuffers();
2672 newStream->priv = channel;
2673 }
2674 break;
2675 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2676 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2677 mChannelHandle,
2678 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002679 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002680 this,
2681 newStream,
2682 (cam_stream_type_t)
2683 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2684 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2685 mMetadataChannel);
2686 if (channel == NULL) {
2687 LOGE("allocation of YUV channel failed");
2688 pthread_mutex_unlock(&mMutex);
2689 return -ENOMEM;
2690 }
2691 newStream->max_buffers = channel->getNumBuffers();
2692 newStream->priv = channel;
2693 break;
2694 }
2695 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2696 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002697 case HAL_PIXEL_FORMAT_RAW10: {
2698 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2699 (HAL_DATASPACE_DEPTH != newStream->data_space))
2700 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002701 mRawChannel = new QCamera3RawChannel(
2702 mCameraHandle->camera_handle, mChannelHandle,
2703 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002704 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002705 this, newStream,
2706 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002707 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002708 if (mRawChannel == NULL) {
2709 LOGE("allocation of raw channel failed");
2710 pthread_mutex_unlock(&mMutex);
2711 return -ENOMEM;
2712 }
2713 newStream->max_buffers = mRawChannel->getNumBuffers();
2714 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2715 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002716 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002717 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002718 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2719 mDepthChannel = new QCamera3DepthChannel(
2720 mCameraHandle->camera_handle, mChannelHandle,
2721 mCameraHandle->ops, NULL, NULL, &padding_info,
2722 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2723 mMetadataChannel);
2724 if (NULL == mDepthChannel) {
2725 LOGE("Allocation of depth channel failed");
2726 pthread_mutex_unlock(&mMutex);
2727 return NO_MEMORY;
2728 }
2729 newStream->priv = mDepthChannel;
2730 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2731 } else {
2732 // Max live snapshot inflight buffer is 1. This is to mitigate
2733 // frame drop issues for video snapshot. The more buffers being
2734 // allocated, the more frame drops there are.
2735 mPictureChannel = new QCamera3PicChannel(
2736 mCameraHandle->camera_handle, mChannelHandle,
2737 mCameraHandle->ops, captureResultCb,
2738 setBufferErrorStatus, &padding_info, this, newStream,
2739 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2740 m_bIs4KVideo, isZsl, mMetadataChannel,
2741 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2742 if (mPictureChannel == NULL) {
2743 LOGE("allocation of channel failed");
2744 pthread_mutex_unlock(&mMutex);
2745 return -ENOMEM;
2746 }
2747 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2748 newStream->max_buffers = mPictureChannel->getNumBuffers();
2749 mPictureChannel->overrideYuvSize(
2750 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2751 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002753 break;
2754
2755 default:
2756 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002757 pthread_mutex_unlock(&mMutex);
2758 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002759 }
2760 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2761 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2762 } else {
2763 LOGE("Error, Unknown stream type");
2764 pthread_mutex_unlock(&mMutex);
2765 return -EINVAL;
2766 }
2767
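        // The block below decides whether this stream needs the private UBWC gralloc
        // usage flag: if UBWC is enabled and getStreamDefaultFormat() resolves to
        // CAM_FORMAT_YUV_420_NV12_UBWC (taking forcePreviewUBWC and the EIS3/front/
        // HFR-derived isType into account), the allocator is asked for UBWC buffers.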
2768 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002769 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002770 // Here we only care whether it's EIS3 or not
2771 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2772 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2773 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2774 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002775 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002776 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002777 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2779 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2780 }
2781 }
2782
2783 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2784 it != mStreamInfo.end(); it++) {
2785 if ((*it)->stream == newStream) {
2786 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2787 break;
2788 }
2789 }
2790 } else {
2791 // Channel already exists for this stream
2792 // Do nothing for now
2793 }
2794 padding_info = gCamCapability[mCameraId]->padding_info;
2795
Emilian Peev7650c122017-01-19 08:24:33 -08002796 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002797 * since there is no real stream associated with them
2798 */
Emilian Peev7650c122017-01-19 08:24:33 -08002799 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002800 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2801 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002803 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 }
2805
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002806 // Let buffer dispatcher know the configured streams.
2807 mOutputBufferDispatcher.configureStreams(streamList);
2808
Thierry Strudel2896d122017-02-23 19:18:03 -08002809 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2810 onlyRaw = false;
2811 }
2812
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002813 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002814 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002815 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002816 cam_analysis_info_t analysisInfo;
2817 int32_t ret = NO_ERROR;
2818 ret = mCommon.getAnalysisInfo(
2819 FALSE,
2820 analysisFeatureMask,
2821 &analysisInfo);
2822 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002823 cam_color_filter_arrangement_t analysis_color_arrangement =
2824 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2825 CAM_FILTER_ARRANGEMENT_Y :
2826 gCamCapability[mCameraId]->color_arrangement);
2827 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2828 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002829 cam_dimension_t analysisDim;
2830 analysisDim = mCommon.getMatchingDimension(previewSize,
2831 analysisInfo.analysis_recommended_res);
2832
2833 mAnalysisChannel = new QCamera3SupportChannel(
2834 mCameraHandle->camera_handle,
2835 mChannelHandle,
2836 mCameraHandle->ops,
2837 &analysisInfo.analysis_padding_info,
2838 analysisFeatureMask,
2839 CAM_STREAM_TYPE_ANALYSIS,
2840 &analysisDim,
2841 (analysisInfo.analysis_format
2842 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2843 : CAM_FORMAT_YUV_420_NV21),
2844 analysisInfo.hw_analysis_supported,
2845 gCamCapability[mCameraId]->color_arrangement,
2846 this,
2847 0); // force buffer count to 0
2848 } else {
2849 LOGW("getAnalysisInfo failed, ret = %d", ret);
2850 }
2851 if (!mAnalysisChannel) {
2852 LOGW("Analysis channel cannot be created");
2853 }
2854 }
2855
Thierry Strudel3d639192016-09-09 11:52:26 -07002856 //RAW DUMP channel
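    // If raw dumping is enabled via property (mEnableRawDump) but the framework did
    // not request any RAW stream, create an internal raw dump channel at the
    // sensor's maximum raw size so raw frames can still be dumped.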
2857 if (mEnableRawDump && isRawStreamRequested == false){
2858 cam_dimension_t rawDumpSize;
2859 rawDumpSize = getMaxRawSize(mCameraId);
2860 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2861 setPAAFSupport(rawDumpFeatureMask,
2862 CAM_STREAM_TYPE_RAW,
2863 gCamCapability[mCameraId]->color_arrangement);
2864 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2865 mChannelHandle,
2866 mCameraHandle->ops,
2867 rawDumpSize,
2868 &padding_info,
2869 this, rawDumpFeatureMask);
2870 if (!mRawDumpChannel) {
2871 LOGE("Raw Dump channel cannot be created");
2872 pthread_mutex_unlock(&mMutex);
2873 return -ENOMEM;
2874 }
2875 }
2876
Thierry Strudel3d639192016-09-09 11:52:26 -07002877 if (mAnalysisChannel) {
2878 cam_analysis_info_t analysisInfo;
2879 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2880 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2881 CAM_STREAM_TYPE_ANALYSIS;
2882 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2883 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002884 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002885 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2886 &analysisInfo);
2887 if (rc != NO_ERROR) {
2888 LOGE("getAnalysisInfo failed, ret = %d", rc);
2889 pthread_mutex_unlock(&mMutex);
2890 return rc;
2891 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002892 cam_color_filter_arrangement_t analysis_color_arrangement =
2893 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2894 CAM_FILTER_ARRANGEMENT_Y :
2895 gCamCapability[mCameraId]->color_arrangement);
2896 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2897 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2898 analysis_color_arrangement);
2899
Thierry Strudel3d639192016-09-09 11:52:26 -07002900 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002901 mCommon.getMatchingDimension(previewSize,
2902 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 mStreamConfigInfo.num_streams++;
2904 }
2905
Thierry Strudel2896d122017-02-23 19:18:03 -08002906 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002907 cam_analysis_info_t supportInfo;
2908 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2909 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2910 setPAAFSupport(callbackFeatureMask,
2911 CAM_STREAM_TYPE_CALLBACK,
2912 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002913 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002914 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002915 if (ret != NO_ERROR) {
2916 /* Ignore the error for Mono camera
2917 * because the PAAF bit mask is only set
2918 * for CAM_STREAM_TYPE_ANALYSIS stream type
2919 */
2920 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2921 LOGW("getAnalysisInfo failed, ret = %d", ret);
2922 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002923 }
2924 mSupportChannel = new QCamera3SupportChannel(
2925 mCameraHandle->camera_handle,
2926 mChannelHandle,
2927 mCameraHandle->ops,
2928 &gCamCapability[mCameraId]->padding_info,
2929 callbackFeatureMask,
2930 CAM_STREAM_TYPE_CALLBACK,
2931 &QCamera3SupportChannel::kDim,
2932 CAM_FORMAT_YUV_420_NV21,
2933 supportInfo.hw_analysis_supported,
2934 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002935 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002936 if (!mSupportChannel) {
2937 LOGE("dummy channel cannot be created");
2938 pthread_mutex_unlock(&mMutex);
2939 return -ENOMEM;
2940 }
2941 }
2942
2943 if (mSupportChannel) {
2944 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2945 QCamera3SupportChannel::kDim;
2946 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2947 CAM_STREAM_TYPE_CALLBACK;
2948 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2949 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2950 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2952 gCamCapability[mCameraId]->color_arrangement);
2953 mStreamConfigInfo.num_streams++;
2954 }
2955
2956 if (mRawDumpChannel) {
2957 cam_dimension_t rawSize;
2958 rawSize = getMaxRawSize(mCameraId);
2959 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2960 rawSize;
2961 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2962 CAM_STREAM_TYPE_RAW;
2963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2964 CAM_QCOM_FEATURE_NONE;
2965 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2966 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2967 gCamCapability[mCameraId]->color_arrangement);
2968 mStreamConfigInfo.num_streams++;
2969 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002970
2971 if (mHdrPlusRawSrcChannel) {
2972 cam_dimension_t rawSize;
2973 rawSize = getMaxRawSize(mCameraId);
2974 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2975 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2976 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2977 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2978 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2979 gCamCapability[mCameraId]->color_arrangement);
2980 mStreamConfigInfo.num_streams++;
2981 }
2982
Thierry Strudel3d639192016-09-09 11:52:26 -07002983 /* In HFR mode, if video stream is not added, create a dummy channel so that
2984 * ISP can create a batch mode even for preview only case. This channel is
2985 * never 'start'ed (no stream-on), it is only 'initialized' */
2986 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2987 !m_bIsVideo) {
2988 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2989 setPAAFSupport(dummyFeatureMask,
2990 CAM_STREAM_TYPE_VIDEO,
2991 gCamCapability[mCameraId]->color_arrangement);
2992 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2993 mChannelHandle,
2994 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002995 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002996 this,
2997 &mDummyBatchStream,
2998 CAM_STREAM_TYPE_VIDEO,
2999 dummyFeatureMask,
3000 mMetadataChannel);
3001 if (NULL == mDummyBatchChannel) {
3002 LOGE("creation of mDummyBatchChannel failed. "
3003 "Preview will use non-hfr sensor mode ");
3004 }
3005 }
3006 if (mDummyBatchChannel) {
3007 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3008 mDummyBatchStream.width;
3009 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3010 mDummyBatchStream.height;
3011 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3012 CAM_STREAM_TYPE_VIDEO;
3013 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3014 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mStreamConfigInfo.num_streams++;
3019 }
3020
3021 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3022 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003023 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003024 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003025
3026 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3027 for (pendingRequestIterator i = mPendingRequestsList.begin();
3028 i != mPendingRequestsList.end();) {
3029 i = erasePendingRequest(i);
3030 }
3031 mPendingFrameDropList.clear();
3032 // Initialize/Reset the pending buffers list
3033 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3034 req.mPendingBufferList.clear();
3035 }
3036 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003037 mExpectedInflightDuration = 0;
3038 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003039
Thierry Strudel3d639192016-09-09 11:52:26 -07003040 mCurJpegMeta.clear();
3041 //Get min frame duration for this stream configuration
3042 deriveMinFrameDuration();
3043
Chien-Yu Chenee335912017-02-09 17:53:20 -08003044 mFirstPreviewIntentSeen = false;
3045
3046 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003047 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003048 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3049 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003050 disableHdrPlusModeLocked();
3051 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003052
Thierry Strudel3d639192016-09-09 11:52:26 -07003053 // Update state
3054 mState = CONFIGURED;
3055
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003056 mFirstMetadataCallback = true;
3057
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 pthread_mutex_unlock(&mMutex);
3059
3060 return rc;
3061}
3062
3063/*===========================================================================
3064 * FUNCTION : validateCaptureRequest
3065 *
3066 * DESCRIPTION: validate a capture request from camera service
3067 *
3068 * PARAMETERS :
3069 * @request : request from framework to process
3070 *
3071 * RETURN :
3072 *
3073 *==========================================================================*/
3074int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003075 camera3_capture_request_t *request,
3076 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003077{
3078 ssize_t idx = 0;
3079 const camera3_stream_buffer_t *b;
3080 CameraMetadata meta;
3081
3082 /* Sanity check the request */
3083 if (request == NULL) {
3084 LOGE("NULL capture request");
3085 return BAD_VALUE;
3086 }
3087
3088 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3089 /*settings cannot be null for the first request*/
3090 return BAD_VALUE;
3091 }
3092
3093 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3095 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003096 LOGE("Request %d: No output buffers provided!",
3097 frameNumber);
3098 return BAD_VALUE;
3099 }
3100 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3101 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3102 request->num_output_buffers, MAX_NUM_STREAMS);
3103 return BAD_VALUE;
3104 }
3105 if (request->input_buffer != NULL) {
3106 b = request->input_buffer;
3107 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3108 LOGE("Request %d: Buffer %ld: Status not OK!",
3109 frameNumber, (long)idx);
3110 return BAD_VALUE;
3111 }
3112 if (b->release_fence != -1) {
3113 LOGE("Request %d: Buffer %ld: Has a release fence!",
3114 frameNumber, (long)idx);
3115 return BAD_VALUE;
3116 }
3117 if (b->buffer == NULL) {
3118 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3119 frameNumber, (long)idx);
3120 return BAD_VALUE;
3121 }
3122 }
3123
3124 // Validate all buffers
3125 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003126 if (b == NULL) {
3127 return BAD_VALUE;
3128 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003129 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003130 QCamera3ProcessingChannel *channel =
3131 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3132 if (channel == NULL) {
3133 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3134 frameNumber, (long)idx);
3135 return BAD_VALUE;
3136 }
3137 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3138 LOGE("Request %d: Buffer %ld: Status not OK!",
3139 frameNumber, (long)idx);
3140 return BAD_VALUE;
3141 }
3142 if (b->release_fence != -1) {
3143 LOGE("Request %d: Buffer %ld: Has a release fence!",
3144 frameNumber, (long)idx);
3145 return BAD_VALUE;
3146 }
3147 if (b->buffer == NULL) {
3148 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3149 frameNumber, (long)idx);
3150 return BAD_VALUE;
3151 }
3152 if (*(b->buffer) == NULL) {
3153 LOGE("Request %d: Buffer %ld: NULL private handle!",
3154 frameNumber, (long)idx);
3155 return BAD_VALUE;
3156 }
3157 idx++;
3158 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003159 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003160 return NO_ERROR;
3161}
3162
3163/*===========================================================================
3164 * FUNCTION : deriveMinFrameDuration
3165 *
3166 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
3167 * on currently configured streams.
3168 *
3169 * PARAMETERS : NONE
3170 *
3171 * RETURN : NONE
3172 *
3173 *==========================================================================*/
3174void QCamera3HardwareInterface::deriveMinFrameDuration()
3175{
3176 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003177 bool hasRaw = false;
3178
3179 mMinRawFrameDuration = 0;
3180 mMinJpegFrameDuration = 0;
3181 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003182
3183 maxJpegDim = 0;
3184 maxProcessedDim = 0;
3185 maxRawDim = 0;
3186
3187 // Figure out maximum jpeg, processed, and raw dimensions
3188 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3189 it != mStreamInfo.end(); it++) {
3190
3191 // Input stream doesn't have valid stream_type
3192 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3193 continue;
3194
3195 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3196 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3197 if (dimension > maxJpegDim)
3198 maxJpegDim = dimension;
3199 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3200 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3201 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003202 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003203 if (dimension > maxRawDim)
3204 maxRawDim = dimension;
3205 } else {
3206 if (dimension > maxProcessedDim)
3207 maxProcessedDim = dimension;
3208 }
3209 }
3210
3211 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3212 MAX_SIZES_CNT);
3213
3214 //Assume all jpeg dimensions are in processed dimensions.
3215 if (maxJpegDim > maxProcessedDim)
3216 maxProcessedDim = maxJpegDim;
3217 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003218 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003219 maxRawDim = INT32_MAX;
3220
3221 for (size_t i = 0; i < count; i++) {
3222 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3223 gCamCapability[mCameraId]->raw_dim[i].height;
3224 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3225 maxRawDim = dimension;
3226 }
3227 }
3228
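    // Illustrative example (hypothetical sizes): with a 10MP RAW stream and a 12MP
    // processed/JPEG stream configured, and a sensor that also supports a 16MP raw
    // size, maxRawDim is bumped to the 16MP entry (the smallest raw size covering
    // the processed output); the loops below then pick raw_min_duration /
    // picture_min_duration for the matching capability-table entries.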
3229 //Find minimum durations for processed, jpeg, and raw
3230 for (size_t i = 0; i < count; i++) {
3231 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3232 gCamCapability[mCameraId]->raw_dim[i].height) {
3233 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3234 break;
3235 }
3236 }
3237 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3238 for (size_t i = 0; i < count; i++) {
3239 if (maxProcessedDim ==
3240 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3241 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3242 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3243 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3244 break;
3245 }
3246 }
3247}
3248
3249/*===========================================================================
3250 * FUNCTION : getMinFrameDuration
3251 *
3252 * DESCRIPTION: get the minimum frame duration based on the currently derived
3253 * minimum frame durations and the current request configuration.
3254 *
3255 * PARAMETERS : @request: request sent by the framework
3256 *
3257 * RETURN : min frame duration for a particular request
3258 *
3259 *==========================================================================*/
3260int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3261{
3262 bool hasJpegStream = false;
3263 bool hasRawStream = false;
3264 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3265 const camera3_stream_t *stream = request->output_buffers[i].stream;
3266 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3267 hasJpegStream = true;
3268 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3269 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3270 stream->format == HAL_PIXEL_FORMAT_RAW16)
3271 hasRawStream = true;
3272 }
3273
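    // The request's frame-duration floor is the max of the per-type minimums for
    // the stream types present in this request; the JPEG minimum is only factored
    // in when the request actually contains a BLOB (JPEG) buffer.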
3274 if (!hasJpegStream)
3275 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3276 else
3277 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3278}
3279
3280/*===========================================================================
3281 * FUNCTION : handleBuffersDuringFlushLock
3282 *
3283 * DESCRIPTION: Account for buffers returned from back-end during flush
3284 * This function is executed while mMutex is held by the caller.
3285 *
3286 * PARAMETERS :
3287 * @buffer: image buffer for the callback
3288 *
3289 * RETURN :
3290 *==========================================================================*/
3291void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3292{
3293 bool buffer_found = false;
3294 for (List<PendingBuffersInRequest>::iterator req =
3295 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3296 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3297 for (List<PendingBufferInfo>::iterator i =
3298 req->mPendingBufferList.begin();
3299 i != req->mPendingBufferList.end(); i++) {
3300 if (i->buffer == buffer->buffer) {
3301 mPendingBuffersMap.numPendingBufsAtFlush--;
3302 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3303 buffer->buffer, req->frame_number,
3304 mPendingBuffersMap.numPendingBufsAtFlush);
3305 buffer_found = true;
3306 break;
3307 }
3308 }
3309 if (buffer_found) {
3310 break;
3311 }
3312 }
3313 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3314 //signal the flush()
3315 LOGD("All buffers returned to HAL. Continue flush");
3316 pthread_cond_signal(&mBuffersCond);
3317 }
3318}
3319
Thierry Strudel3d639192016-09-09 11:52:26 -07003320/*===========================================================================
3321 * FUNCTION : handleBatchMetadata
3322 *
3323 * DESCRIPTION: Handles metadata buffer callback in batch mode
3324 *
3325 * PARAMETERS : @metadata_buf: metadata buffer
3326 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3327 * the meta buf in this method
3328 *
3329 * RETURN :
3330 *
3331 *==========================================================================*/
3332void QCamera3HardwareInterface::handleBatchMetadata(
3333 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3334{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003335 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003336
3337 if (NULL == metadata_buf) {
3338 LOGE("metadata_buf is NULL");
3339 return;
3340 }
3341 /* In batch mode, the metadata will contain the frame number and timestamp of
3342 * the last frame in the batch. E.g. a batch containing buffers from requests
3343 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3344 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3345 * multiple process_capture_results */
3346 metadata_buffer_t *metadata =
3347 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3348 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3349 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3350 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3351 uint32_t frame_number = 0, urgent_frame_number = 0;
3352 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3353 bool invalid_metadata = false;
3354 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3355 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003356 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003357
3358 int32_t *p_frame_number_valid =
3359 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3360 uint32_t *p_frame_number =
3361 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3362 int64_t *p_capture_time =
3363 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3364 int32_t *p_urgent_frame_number_valid =
3365 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3366 uint32_t *p_urgent_frame_number =
3367 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3368
3369 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3370 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3371 (NULL == p_urgent_frame_number)) {
3372 LOGE("Invalid metadata");
3373 invalid_metadata = true;
3374 } else {
3375 frame_number_valid = *p_frame_number_valid;
3376 last_frame_number = *p_frame_number;
3377 last_frame_capture_time = *p_capture_time;
3378 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3379 last_urgent_frame_number = *p_urgent_frame_number;
3380 }
3381
3382 /* In batch mode, when no video buffers are requested, set_parms are sent
3383 * for every capture_request. The difference between consecutive urgent
3384 * frame numbers and frame numbers should be used to interpolate the
3385 * corresponding frame numbers and time stamps */
3386 pthread_mutex_lock(&mMutex);
3387 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003388 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3389 if(idx < 0) {
3390 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3391 last_urgent_frame_number);
3392 mState = ERROR;
3393 pthread_mutex_unlock(&mMutex);
3394 return;
3395 }
3396 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003397 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3398 first_urgent_frame_number;
3399
3400 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3401 urgent_frame_number_valid,
3402 first_urgent_frame_number, last_urgent_frame_number);
3403 }
3404
3405 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003406 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3407 if(idx < 0) {
3408 LOGE("Invalid frame number received: %d. Irrecoverable error",
3409 last_frame_number);
3410 mState = ERROR;
3411 pthread_mutex_unlock(&mMutex);
3412 return;
3413 }
3414 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003415 frameNumDiff = last_frame_number + 1 -
3416 first_frame_number;
3417 mPendingBatchMap.removeItem(last_frame_number);
3418
3419 LOGD("frm: valid: %d frm_num: %d - %d",
3420 frame_number_valid,
3421 first_frame_number, last_frame_number);
3422
3423 }
3424 pthread_mutex_unlock(&mMutex);
3425
3426 if (urgent_frame_number_valid || frame_number_valid) {
3427 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3428 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3429 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3430 urgentFrameNumDiff, last_urgent_frame_number);
3431 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3432 LOGE("frameNumDiff: %d frameNum: %d",
3433 frameNumDiff, last_frame_number);
3434 }
3435
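    // Worked example (hypothetical numbers): for a batch of 4 with
    // last_frame_number = 8, first_frame_number is 5, so the loop below emits
    // results for frames 5..8 and spaces their inferred timestamps 1/mHFRVideoFps
    // apart, ending at the sensor timestamp reported for the last frame.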
3436 for (size_t i = 0; i < loopCount; i++) {
3437 /* handleMetadataWithLock is called even for invalid_metadata for
3438 * pipeline depth calculation */
3439 if (!invalid_metadata) {
3440 /* Infer frame number. Batch metadata contains frame number of the
3441 * last frame */
3442 if (urgent_frame_number_valid) {
3443 if (i < urgentFrameNumDiff) {
3444 urgent_frame_number =
3445 first_urgent_frame_number + i;
3446 LOGD("inferred urgent frame_number: %d",
3447 urgent_frame_number);
3448 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3449 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3450 } else {
3451 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3452 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3453 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3454 }
3455 }
3456
3457 /* Infer frame number. Batch metadata contains frame number of the
3458 * last frame */
3459 if (frame_number_valid) {
3460 if (i < frameNumDiff) {
3461 frame_number = first_frame_number + i;
3462 LOGD("inferred frame_number: %d", frame_number);
3463 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3464 CAM_INTF_META_FRAME_NUMBER, frame_number);
3465 } else {
3466 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3469 }
3470 }
3471
3472 if (last_frame_capture_time) {
3473 //Infer timestamp
3474 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003475 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003476 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003477 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003478 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3479 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3480 LOGD("batch capture_time: %lld, capture_time: %lld",
3481 last_frame_capture_time, capture_time);
3482 }
3483 }
3484 pthread_mutex_lock(&mMutex);
3485 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003486 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003487 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3488 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003489 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003490 pthread_mutex_unlock(&mMutex);
3491 }
3492
3493 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003494 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 mMetadataChannel->bufDone(metadata_buf);
3496 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003497 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003498 }
3499}
3500
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003501void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3502 camera3_error_msg_code_t errorCode)
3503{
3504 camera3_notify_msg_t notify_msg;
3505 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3506 notify_msg.type = CAMERA3_MSG_ERROR;
3507 notify_msg.message.error.error_code = errorCode;
3508 notify_msg.message.error.error_stream = NULL;
3509 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003510 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003511
3512 return;
3513}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003514
3515/*===========================================================================
3516 * FUNCTION : sendPartialMetadataWithLock
3517 *
3518 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3519 *
3520 * PARAMETERS : @metadata: metadata buffer
3521 * @requestIter: The iterator for the pending capture request for
3522 * which the partial result is being sent
3523 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3524 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003525 * @isJumpstartMetadata: Whether this is a partial metadata for
3526 * jumpstart, i.e. even though it doesn't map to a valid partial
3527 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003528 *
3529 * RETURN :
3530 *
3531 *==========================================================================*/
3532
3533void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3534 metadata_buffer_t *metadata,
3535 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003536 bool lastUrgentMetadataInBatch,
3537 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003538{
3539 camera3_capture_result_t result;
3540 memset(&result, 0, sizeof(camera3_capture_result_t));
3541
3542 requestIter->partial_result_cnt++;
3543
3544 // Extract 3A metadata
3545 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003546 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3547 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003548 // Populate metadata result
3549 result.frame_number = requestIter->frame_number;
3550 result.num_output_buffers = 0;
3551 result.output_buffers = NULL;
3552 result.partial_result = requestIter->partial_result_cnt;
3553
3554 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003555 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003556 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3557 // Notify HDR+ client about the partial metadata.
3558 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3559 result.partial_result == PARTIAL_RESULT_COUNT);
3560 }
3561 }
3562
3563 orchestrateResult(&result);
3564 LOGD("urgent frame_number = %u", result.frame_number);
3565 free_camera_metadata((camera_metadata_t *)result.result);
3566}
3567
Thierry Strudel3d639192016-09-09 11:52:26 -07003568/*===========================================================================
3569 * FUNCTION : handleMetadataWithLock
3570 *
3571 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3572 *
3573 * PARAMETERS : @metadata_buf: metadata buffer
3574 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3575 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003576 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3577 * last urgent metadata in a batch. Always true for non-batch mode
3578 * @lastMetadataInBatch: Boolean to indicate whether this is the
3579 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003580 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3581 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003582 *
3583 * RETURN :
3584 *
3585 *==========================================================================*/
3586void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003587 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003588 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3589 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003590{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003591 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3593 //during flush do not send metadata from this thread
3594 LOGD("not sending metadata during flush or when mState is error");
3595 if (free_and_bufdone_meta_buf) {
3596 mMetadataChannel->bufDone(metadata_buf);
3597 free(metadata_buf);
3598 }
3599 return;
3600 }
3601
3602 //not in flush
3603 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3604 int32_t frame_number_valid, urgent_frame_number_valid;
3605 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003606 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003607 nsecs_t currentSysTime;
3608
3609 int32_t *p_frame_number_valid =
3610 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3611 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3612 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003613 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003614 int32_t *p_urgent_frame_number_valid =
3615 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3616 uint32_t *p_urgent_frame_number =
3617 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3618 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3619 metadata) {
3620 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3621 *p_frame_number_valid, *p_frame_number);
3622 }
3623
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003624 camera_metadata_t *resultMetadata = nullptr;
3625
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3627 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3628 LOGE("Invalid metadata");
3629 if (free_and_bufdone_meta_buf) {
3630 mMetadataChannel->bufDone(metadata_buf);
3631 free(metadata_buf);
3632 }
3633 goto done_metadata;
3634 }
3635 frame_number_valid = *p_frame_number_valid;
3636 frame_number = *p_frame_number;
3637 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003638 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003639 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3640 urgent_frame_number = *p_urgent_frame_number;
3641 currentSysTime = systemTime(CLOCK_MONOTONIC);
3642
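    // When the sensor timestamp is not calibrated, the block below estimates the
    // BOOTTIME-to-MONOTONIC clock offset: it samples both clocks a few times, keeps
    // the pair with the smallest monotonic gap (least scheduling jitter), and
    // subtracts that offset from capture_time.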
Jason Lee603176d2017-05-31 11:43:27 -07003643 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3644 const int tries = 3;
3645 nsecs_t bestGap, measured;
3646 for (int i = 0; i < tries; ++i) {
3647 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3648 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3649 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3650 const nsecs_t gap = tmono2 - tmono;
3651 if (i == 0 || gap < bestGap) {
3652 bestGap = gap;
3653 measured = tbase - ((tmono + tmono2) >> 1);
3654 }
3655 }
3656 capture_time -= measured;
3657 }
3658
Thierry Strudel3d639192016-09-09 11:52:26 -07003659 // Detect if buffers from any requests are overdue
3660 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003661 int64_t timeout;
3662 {
3663 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3664 // If there is a pending HDR+ request, the following requests may be blocked until the
3665 // HDR+ request is done. So allow a longer timeout.
3666 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3667 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003668 if (timeout < mExpectedInflightDuration) {
3669 timeout = mExpectedInflightDuration;
3670 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003671 }
3672
3673 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003674 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003675 assert(missed.stream->priv);
3676 if (missed.stream->priv) {
3677 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3678 assert(ch->mStreams[0]);
3679 if (ch->mStreams[0]) {
3680 LOGE("Cancel missing frame = %d, buffer = %p,"
3681 "stream type = %d, stream format = %d",
3682 req.frame_number, missed.buffer,
3683 ch->mStreams[0]->getMyType(), missed.stream->format);
3684 ch->timeoutFrame(req.frame_number);
3685 }
3686 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003687 }
3688 }
3689 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003690 //For the very first metadata callback, regardless of whether it contains a valid
3691 //frame number, send the partial metadata for the jumpstarting requests.
3692 //Note that this has to be done even if the metadata doesn't contain a valid
3693 //urgent frame number, because in the case where only 1 request is ever submitted
3694 //to the HAL, there won't be a subsequent valid urgent frame number.
3695 if (mFirstMetadataCallback) {
3696 for (pendingRequestIterator i =
3697 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3698 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003699 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3700 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003701 }
3702 }
3703 mFirstMetadataCallback = false;
3704 }
3705
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 //Partial result on process_capture_result for timestamp
3707 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003708 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003709
3710 //Received an urgent frame number, handle it
3711 //using partial results
3712 for (pendingRequestIterator i =
3713 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3714 LOGD("Iterator Frame = %d urgent frame = %d",
3715 i->frame_number, urgent_frame_number);
3716
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003717 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003718 (i->partial_result_cnt == 0)) {
3719 LOGE("Error: HAL missed urgent metadata for frame number %d",
3720 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003721 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003722 }
3723
3724 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003725 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003726 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3727 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003728 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3729 // Instant AEC settled for this frame.
3730 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3731 mInstantAECSettledFrameNumber = urgent_frame_number;
3732 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003733 break;
3734 }
3735 }
3736 }
3737
3738 if (!frame_number_valid) {
3739 LOGD("Not a valid normal frame number, used as SOF only");
3740 if (free_and_bufdone_meta_buf) {
3741 mMetadataChannel->bufDone(metadata_buf);
3742 free(metadata_buf);
3743 }
3744 goto done_metadata;
3745 }
3746 LOGH("valid frame_number = %u, capture_time = %lld",
3747 frame_number, capture_time);
3748
Emilian Peev4e0fe952017-06-30 12:40:09 -07003749 handleDepthDataLocked(metadata->depth_data, frame_number,
3750 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003751
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003752 // Check whether any stream buffer corresponding to this frame is dropped or not.
3753 // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3754 // OR, if instant AEC is enabled, drop frames until AEC is settled.
3755 for (auto & pendingRequest : mPendingRequestsList) {
3756 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3757 mInstantAECSettledFrameNumber)) {
3758 camera3_notify_msg_t notify_msg = {};
3759 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003760 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003761 QCamera3ProcessingChannel *channel =
3762 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003763 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003764 if (p_cam_frame_drop) {
3765 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003766 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003767 // Got the stream ID for drop frame.
3768 dropFrame = true;
3769 break;
3770 }
3771 }
3772 } else {
3773 // This is instant AEC case.
3774 // For instant AEC, drop the stream until AEC is settled.
3775 dropFrame = true;
3776 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003777
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003778 if (dropFrame) {
3779 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3780 if (p_cam_frame_drop) {
3781 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003782 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003783 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003784 } else {
3785 // For instant AEC, inform frame drop and frame number
3786 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3787 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 pendingRequest.frame_number, streamID,
3789 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003790 }
3791 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003792 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003793 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003794 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003795 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003796 if (p_cam_frame_drop) {
3797 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003798 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003799 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003800 } else {
3801 // For instant AEC, inform frame drop and frame number
3802 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3803 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003804 pendingRequest.frame_number, streamID,
3805 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003806 }
3807 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003809 PendingFrameDrop.stream_ID = streamID;
3810 // Add the Frame drop info to mPendingFrameDropList
3811 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003813 }
3814 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003815 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003816
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 for (auto & pendingRequest : mPendingRequestsList) {
3818 // Find the pending request with the frame number.
3819 if (pendingRequest.frame_number == frame_number) {
3820 // Update the sensor timestamp.
3821 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003822
Thierry Strudel3d639192016-09-09 11:52:26 -07003823
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003824 /* Set the timestamp in the display metadata so that clients aware of
3825 private_handle, such as VT, can use this unmodified timestamp.
3826 The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003827 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003828
Thierry Strudel3d639192016-09-09 11:52:26 -07003829 // Find channel requiring metadata, meaning internal offline postprocess
3830 // is needed.
 3831            //TODO: for now, we don't support two streams requiring metadata at the same time
 3832            // (because we are not making copies, and the metadata buffer is not reference counted).
3833 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3835 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003836 if (iter->need_metadata) {
3837 internalPproc = true;
3838 QCamera3ProcessingChannel *channel =
3839 (QCamera3ProcessingChannel *)iter->stream->priv;
3840 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003841 if(p_is_metabuf_queued != NULL) {
3842 *p_is_metabuf_queued = true;
3843 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003844 break;
3845 }
3846 }
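            // Also check streams that were requested internally (e.g. by the HDR
            // snapshot orchestration); they may need this metadata for offline
            // reprocessing as well.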
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003847 for (auto itr = pendingRequest.internalRequestList.begin();
3848 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003849 if (itr->need_metadata) {
3850 internalPproc = true;
3851 QCamera3ProcessingChannel *channel =
3852 (QCamera3ProcessingChannel *)itr->stream->priv;
3853 channel->queueReprocMetadata(metadata_buf);
3854 break;
3855 }
3856 }
3857
Thierry Strudel54dc9782017-02-15 12:12:10 -08003858 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003859
3860 bool *enableZsl = nullptr;
3861 if (gExposeEnableZslKey) {
3862 enableZsl = &pendingRequest.enableZsl;
3863 }
3864
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003865 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003866 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003867 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003868
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003869 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003870
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003871 if (pendingRequest.blob_request) {
3872 //Dump tuning metadata if enabled and available
3873 char prop[PROPERTY_VALUE_MAX];
3874 memset(prop, 0, sizeof(prop));
3875 property_get("persist.camera.dumpmetadata", prop, "0");
3876 int32_t enabled = atoi(prop);
3877 if (enabled && metadata->is_tuning_params_valid) {
3878 dumpMetadataToFile(metadata->tuning_params,
3879 mMetaFrameCount,
3880 enabled,
3881 "Snapshot",
3882 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003883 }
3884 }
3885
3886 if (!internalPproc) {
3887 LOGD("couldn't find need_metadata for this metadata");
3888 // Return metadata buffer
3889 if (free_and_bufdone_meta_buf) {
3890 mMetadataChannel->bufDone(metadata_buf);
3891 free(metadata_buf);
3892 }
3893 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003894
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003895 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003896 }
3897 }
3898
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003899 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3900
3901 // Try to send out capture result metadata.
3902 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003903 return;
3904
Thierry Strudel3d639192016-09-09 11:52:26 -07003905done_metadata:
3906 for (pendingRequestIterator i = mPendingRequestsList.begin();
3907 i != mPendingRequestsList.end() ;i++) {
3908 i->pipeline_depth++;
3909 }
3910 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3911 unblockRequestIfNecessary();
3912}
3913
3914/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003915 * FUNCTION   : handleDepthDataLocked
3916 *
3917 * DESCRIPTION: Handles incoming depth data
3918 *
3919 * PARAMETERS : @depthData : Depth data
3920 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003921 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003922 *
3923 * RETURN :
3924 *
3925 *==========================================================================*/
3926void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003927 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003928 uint32_t currentFrameNumber;
3929 buffer_handle_t *depthBuffer;
3930
3931 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003932 return;
3933 }
3934
3935 camera3_stream_buffer_t resultBuffer =
3936 {.acquire_fence = -1,
3937 .release_fence = -1,
3938 .status = CAMERA3_BUFFER_STATUS_OK,
3939 .buffer = nullptr,
3940 .stream = mDepthChannel->getStream()};
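    // Drain the queued depth buffers in order: buffers older than the incoming
    // frame number have no matching depth data and are returned with an error
    // status, while the matching buffer is populated (or flagged as an error if
    // the data is invalid or population fails).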
Emilian Peev7650c122017-01-19 08:24:33 -08003941 do {
3942 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3943 if (nullptr == depthBuffer) {
3944 break;
3945 }
3946
Emilian Peev7650c122017-01-19 08:24:33 -08003947 resultBuffer.buffer = depthBuffer;
3948 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003949 if (valid) {
3950 int32_t rc = mDepthChannel->populateDepthData(depthData,
3951 frameNumber);
3952 if (NO_ERROR != rc) {
3953 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3954 } else {
3955 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3956 }
Emilian Peev7650c122017-01-19 08:24:33 -08003957 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003958 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003959 }
3960 } else if (currentFrameNumber > frameNumber) {
3961 break;
3962 } else {
3963 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3964 {{currentFrameNumber, mDepthChannel->getStream(),
3965 CAMERA3_MSG_ERROR_BUFFER}}};
3966 orchestrateNotify(&notify_msg);
3967
 3968                LOGE("Depth buffer for frame number: %d is missing, "
 3969                        "returning it with error status!", currentFrameNumber);
3970 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3971 }
3972 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003973 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003974 } while (currentFrameNumber < frameNumber);
3975}
3976
3977/*===========================================================================
3978 * FUNCTION : notifyErrorFoPendingDepthData
3979 *
3980 * DESCRIPTION: Returns error for any pending depth buffers
3981 *
3982 * PARAMETERS : depthCh - depth channel that needs to get flushed
3983 *
3984 * RETURN :
3985 *
3986 *==========================================================================*/
3987void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3988 QCamera3DepthChannel *depthCh) {
3989 uint32_t currentFrameNumber;
3990 buffer_handle_t *depthBuffer;
3991
3992 if (nullptr == depthCh) {
3993 return;
3994 }
3995
3996 camera3_notify_msg_t notify_msg =
3997 {.type = CAMERA3_MSG_ERROR,
3998 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3999 camera3_stream_buffer_t resultBuffer =
4000 {.acquire_fence = -1,
4001 .release_fence = -1,
4002 .buffer = nullptr,
4003 .stream = depthCh->getStream(),
4004 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004005
4006 while (nullptr !=
4007 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4008 depthCh->unmapBuffer(currentFrameNumber);
4009
4010 notify_msg.message.error.frame_number = currentFrameNumber;
4011 orchestrateNotify(&notify_msg);
4012
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004013 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004014 };
4015}
4016
4017/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004018 * FUNCTION : hdrPlusPerfLock
4019 *
4020 * DESCRIPTION: perf lock for HDR+ using custom intent
4021 *
4022 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4023 *
4024 * RETURN : None
4025 *
4026 *==========================================================================*/
4027void QCamera3HardwareInterface::hdrPlusPerfLock(
4028 mm_camera_super_buf_t *metadata_buf)
4029{
4030 if (NULL == metadata_buf) {
4031 LOGE("metadata_buf is NULL");
4032 return;
4033 }
4034 metadata_buffer_t *metadata =
4035 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4036 int32_t *p_frame_number_valid =
4037 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4038 uint32_t *p_frame_number =
4039 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4040
4041 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4042 LOGE("%s: Invalid metadata", __func__);
4043 return;
4044 }
4045
Wei Wang01385482017-08-03 10:49:34 -07004046 //acquire perf lock for 2 secs after the last HDR frame is captured
4047 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004048 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4049 if ((p_frame_number != NULL) &&
4050 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004051 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004052 }
4053 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004054}
4055
4056/*===========================================================================
4057 * FUNCTION : handleInputBufferWithLock
4058 *
4059 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4060 *
4061 * PARAMETERS : @frame_number: frame number of the input buffer
4062 *
4063 * RETURN :
4064 *
4065 *==========================================================================*/
4066void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4067{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004068 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004069 pendingRequestIterator i = mPendingRequestsList.begin();
4070 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4071 i++;
4072 }
4073 if (i != mPendingRequestsList.end() && i->input_buffer) {
4074 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004075 CameraMetadata settings;
4076 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4077 if(i->settings) {
4078 settings = i->settings;
4079 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4080 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004081 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004082 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004084 } else {
4085 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004086 }
4087
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004088 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4089 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4090 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004091
4092 camera3_capture_result result;
4093 memset(&result, 0, sizeof(camera3_capture_result));
4094 result.frame_number = frame_number;
4095 result.result = i->settings;
4096 result.input_buffer = i->input_buffer;
4097 result.partial_result = PARTIAL_RESULT_COUNT;
4098
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004099 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 LOGD("Input request metadata and input buffer frame_number = %u",
4101 i->frame_number);
4102 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004103
4104 // Dispatch result metadata that may be just unblocked by this reprocess result.
4105 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004106 } else {
4107 LOGE("Could not find input request for frame number %d", frame_number);
4108 }
4109}
4110
4111/*===========================================================================
4112 * FUNCTION : handleBufferWithLock
4113 *
4114 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4115 *
4116 * PARAMETERS : @buffer: image buffer for the callback
4117 * @frame_number: frame number of the image buffer
4118 *
4119 * RETURN :
4120 *
4121 *==========================================================================*/
4122void QCamera3HardwareInterface::handleBufferWithLock(
4123 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4124{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004125 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004126
4127 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4128 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4129 }
4130
Thierry Strudel3d639192016-09-09 11:52:26 -07004131 /* Nothing to be done during error state */
4132 if ((ERROR == mState) || (DEINIT == mState)) {
4133 return;
4134 }
4135 if (mFlushPerf) {
4136 handleBuffersDuringFlushLock(buffer);
4137 return;
4138 }
4139 //not in flush
4140 // If the frame number doesn't exist in the pending request list,
4141 // directly send the buffer to the frameworks, and update pending buffers map
4142 // Otherwise, book-keep the buffer.
4143 pendingRequestIterator i = mPendingRequestsList.begin();
4144 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4145 i++;
4146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004147
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004148 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004149 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004150 // For a reprocessing request, try to send out result metadata.
4151 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004152 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004153 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004154
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004155 // Check if this frame was dropped.
4156 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4157 m != mPendingFrameDropList.end(); m++) {
4158 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4159 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4160 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4161 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4162 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4163 frame_number, streamID);
4164 m = mPendingFrameDropList.erase(m);
4165 break;
4166 }
4167 }
4168
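    // Merge in any error status already recorded for this buffer in the pending
    // buffers map before dispatching it.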
4169 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4170 LOGH("result frame_number = %d, buffer = %p",
4171 frame_number, buffer->buffer);
4172
4173 mPendingBuffersMap.removeBuf(buffer->buffer);
4174 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4175
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004176 if (mPreviewStarted == false) {
4177 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4178 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004179 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4180
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004181 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4182 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4183 mPreviewStarted = true;
4184
4185 // Set power hint for preview
4186 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4187 }
4188 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004189}
4190
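/*===========================================================================
 * FUNCTION   : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Updates the pending request with the result metadata and tries
 *              to dispatch result metadata, with mMutex lock held.
 *
 * PARAMETERS :
 *   @frameNumber    : frame number of the result
 *   @resultMetadata : result metadata for the frame
 *
 * RETURN     :
 *
 *==========================================================================*/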
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004191void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004192 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004193{
4194 // Find the pending request for this result metadata.
4195 auto requestIter = mPendingRequestsList.begin();
4196 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4197 requestIter++;
4198 }
4199
4200 if (requestIter == mPendingRequestsList.end()) {
4201 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4202 return;
4203 }
4204
4205 // Update the result metadata
4206 requestIter->resultMetadata = resultMetadata;
4207
4208 // Check what type of request this is.
4209 bool liveRequest = false;
4210 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004211 // HDR+ request doesn't have partial results.
4212 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004213 } else if (requestIter->input_buffer != nullptr) {
4214 // Reprocessing request result is the same as settings.
4215 requestIter->resultMetadata = requestIter->settings;
4216 // Reprocessing request doesn't have partial results.
4217 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4218 } else {
4219 liveRequest = true;
4220 requestIter->partial_result_cnt++;
4221 mPendingLiveRequest--;
4222
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004223 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004224 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004225 // For a live request, send the metadata to HDR+ client.
4226 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4227 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4228 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4229 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004230 }
4231 }
4232
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004233    // Remove lens shading map if it's not requested.
4234 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4235 CameraMetadata metadata;
4236 metadata.acquire(resultMetadata);
4237 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4238 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4239 &requestIter->requestedLensShadingMapMode, 1);
4240
4241 requestIter->resultMetadata = metadata.release();
4242 }
4243
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004244 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4245}
4246
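/*===========================================================================
 * FUNCTION   : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Sends out result metadata for pending requests that are ready,
 *              in frame number order, with mMutex lock held.
 *
 * PARAMETERS :
 *   @frameNumber   : frame number of the result that just became ready
 *   @isLiveRequest : true if the result belongs to a live (non-HDR+,
 *                    non-reprocess) request
 *
 * RETURN     :
 *
 *==========================================================================*/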
4247void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4248 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004249 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4250 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 bool readyToSend = true;
4252
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004253 // Iterate through the pending requests to send out result metadata that are ready. Also if
4254 // this result metadata belongs to a live request, notify errors for previous live requests
4255 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004256 auto iter = mPendingRequestsList.begin();
4257 while (iter != mPendingRequestsList.end()) {
4258 // Check if current pending request is ready. If it's not ready, the following pending
4259 // requests are also not ready.
4260 if (readyToSend && iter->resultMetadata == nullptr) {
4261 readyToSend = false;
4262 }
4263
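        // A live request is a regular streaming capture, i.e. neither an HDR+
        // request nor a reprocessing (input buffer) request.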
4264 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4265
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004266 camera3_capture_result_t result = {};
4267 result.frame_number = iter->frame_number;
4268 result.result = iter->resultMetadata;
4269 result.partial_result = iter->partial_result_cnt;
4270
4271 // If this pending buffer has result metadata, we may be able to send out shutter callback
4272 // and result metadata.
4273 if (iter->resultMetadata != nullptr) {
4274 if (!readyToSend) {
4275 // If any of the previous pending request is not ready, this pending request is
4276 // also not ready to send in order to keep shutter callbacks and result metadata
4277 // in order.
4278 iter++;
4279 continue;
4280 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004281 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004282 // If the result metadata belongs to a live request, notify errors for previous pending
4283 // live requests.
4284 mPendingLiveRequest--;
4285
4286 CameraMetadata dummyMetadata;
4287 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4288 result.result = dummyMetadata.release();
4289
4290 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004291
 4292            // partial_result should be PARTIAL_RESULT_COUNT in case of
4293 // ERROR_RESULT.
4294 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4295 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004296 } else {
4297 iter++;
4298 continue;
4299 }
4300
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004301 result.output_buffers = nullptr;
4302 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004303 orchestrateResult(&result);
4304
4305 // For reprocessing, result metadata is the same as settings so do not free it here to
4306 // avoid double free.
4307 if (result.result != iter->settings) {
4308 free_camera_metadata((camera_metadata_t *)result.result);
4309 }
4310 iter->resultMetadata = nullptr;
4311 iter = erasePendingRequest(iter);
4312 }
4313
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004314 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004315 for (auto &iter : mPendingRequestsList) {
4316 // Increment pipeline depth for the following pending requests.
4317 if (iter.frame_number > frameNumber) {
4318 iter.pipeline_depth++;
4319 }
4320 }
4321 }
4322
4323 unblockRequestIfNecessary();
4324}
4325
Thierry Strudel3d639192016-09-09 11:52:26 -07004326/*===========================================================================
4327 * FUNCTION : unblockRequestIfNecessary
4328 *
4329 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4330 * that mMutex is held when this function is called.
4331 *
4332 * PARAMETERS :
4333 *
4334 * RETURN :
4335 *
4336 *==========================================================================*/
4337void QCamera3HardwareInterface::unblockRequestIfNecessary()
4338{
4339 // Unblock process_capture_request
4340 pthread_cond_signal(&mRequestCond);
4341}
4342
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004343/*===========================================================================
4344 * FUNCTION : isHdrSnapshotRequest
4345 *
4346 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4347 *
4348 * PARAMETERS : camera3 request structure
4349 *
4350 * RETURN : boolean decision variable
4351 *
4352 *==========================================================================*/
4353bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4354{
4355 if (request == NULL) {
4356 LOGE("Invalid request handle");
4357 assert(0);
4358 return false;
4359 }
4360
4361 if (!mForceHdrSnapshot) {
4362 CameraMetadata frame_settings;
4363 frame_settings = request->settings;
4364
4365 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4366 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4367 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4368 return false;
4369 }
4370 } else {
4371 return false;
4372 }
4373
4374 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4375 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4376 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4377 return false;
4378 }
4379 } else {
4380 return false;
4381 }
4382 }
4383
4384 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4385 if (request->output_buffers[i].stream->format
4386 == HAL_PIXEL_FORMAT_BLOB) {
4387 return true;
4388 }
4389 }
4390
4391 return false;
4392}
4393/*===========================================================================
4394 * FUNCTION : orchestrateRequest
4395 *
4396 * DESCRIPTION: Orchestrates a capture request from camera service
4397 *
4398 * PARAMETERS :
4399 * @request : request from framework to process
4400 *
4401 * RETURN : Error status codes
4402 *
4403 *==========================================================================*/
4404int32_t QCamera3HardwareInterface::orchestrateRequest(
4405 camera3_capture_request_t *request)
4406{
4407
4408 uint32_t originalFrameNumber = request->frame_number;
4409 uint32_t originalOutputCount = request->num_output_buffers;
4410 const camera_metadata_t *original_settings = request->settings;
4411 List<InternalRequest> internallyRequestedStreams;
4412 List<InternalRequest> emptyInternalList;
4413
4414 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4415 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4416 uint32_t internalFrameNumber;
4417 CameraMetadata modified_meta;
4418
4419
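        /* Expand the single framework HDR request into a sequence of internal
         * captures: an AE-locked metering-only settling request plus the framework
         * capture at GB_HDR_HALF_STEP_EV, then settling and an internal snapshot
         * at 0 EV, and finally settling and an internal snapshot at
         * GB_HDR_2X_STEP_EV compensation. */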
4420 /* Add Blob channel to list of internally requested streams */
4421 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4422 if (request->output_buffers[i].stream->format
4423 == HAL_PIXEL_FORMAT_BLOB) {
4424 InternalRequest streamRequested;
4425 streamRequested.meteringOnly = 1;
4426 streamRequested.need_metadata = 0;
4427 streamRequested.stream = request->output_buffers[i].stream;
4428 internallyRequestedStreams.push_back(streamRequested);
4429 }
4430 }
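        // Suppress the framework output buffers for the internal settling capture;
        // the original buffer count is restored for the actual framework capture below.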
4431 request->num_output_buffers = 0;
4432 auto itr = internallyRequestedStreams.begin();
4433
4434 /* Modify setting to set compensation */
4435 modified_meta = request->settings;
4436 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4437 uint8_t aeLock = 1;
4438 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4439 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4440 camera_metadata_t *modified_settings = modified_meta.release();
4441 request->settings = modified_settings;
4442
4443 /* Capture Settling & -2x frame */
4444 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4445 request->frame_number = internalFrameNumber;
4446 processCaptureRequest(request, internallyRequestedStreams);
4447
4448 request->num_output_buffers = originalOutputCount;
4449 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4450 request->frame_number = internalFrameNumber;
4451 processCaptureRequest(request, emptyInternalList);
4452 request->num_output_buffers = 0;
4453
4454 modified_meta = modified_settings;
4455 expCompensation = 0;
4456 aeLock = 1;
4457 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4458 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4459 modified_settings = modified_meta.release();
4460 request->settings = modified_settings;
4461
4462 /* Capture Settling & 0X frame */
4463
4464 itr = internallyRequestedStreams.begin();
4465 if (itr == internallyRequestedStreams.end()) {
4466 LOGE("Error Internally Requested Stream list is empty");
4467 assert(0);
4468 } else {
4469 itr->need_metadata = 0;
4470 itr->meteringOnly = 1;
4471 }
4472
4473 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4474 request->frame_number = internalFrameNumber;
4475 processCaptureRequest(request, internallyRequestedStreams);
4476
4477 itr = internallyRequestedStreams.begin();
4478 if (itr == internallyRequestedStreams.end()) {
4479 ALOGE("Error Internally Requested Stream list is empty");
4480 assert(0);
4481 } else {
4482 itr->need_metadata = 1;
4483 itr->meteringOnly = 0;
4484 }
4485
4486 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4487 request->frame_number = internalFrameNumber;
4488 processCaptureRequest(request, internallyRequestedStreams);
4489
4490 /* Capture 2X frame*/
4491 modified_meta = modified_settings;
4492 expCompensation = GB_HDR_2X_STEP_EV;
4493 aeLock = 1;
4494 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4495 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4496 modified_settings = modified_meta.release();
4497 request->settings = modified_settings;
4498
4499 itr = internallyRequestedStreams.begin();
4500 if (itr == internallyRequestedStreams.end()) {
4501 ALOGE("Error Internally Requested Stream list is empty");
4502 assert(0);
4503 } else {
4504 itr->need_metadata = 0;
4505 itr->meteringOnly = 1;
4506 }
4507 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4508 request->frame_number = internalFrameNumber;
4509 processCaptureRequest(request, internallyRequestedStreams);
4510
4511 itr = internallyRequestedStreams.begin();
4512 if (itr == internallyRequestedStreams.end()) {
4513 ALOGE("Error Internally Requested Stream list is empty");
4514 assert(0);
4515 } else {
4516 itr->need_metadata = 1;
4517 itr->meteringOnly = 0;
4518 }
4519
4520 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4521 request->frame_number = internalFrameNumber;
4522 processCaptureRequest(request, internallyRequestedStreams);
4523
4524
4525 /* Capture 2X on original streaming config*/
4526 internallyRequestedStreams.clear();
4527
4528 /* Restore original settings pointer */
4529 request->settings = original_settings;
4530 } else {
4531 uint32_t internalFrameNumber;
4532 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4533 request->frame_number = internalFrameNumber;
4534 return processCaptureRequest(request, internallyRequestedStreams);
4535 }
4536
4537 return NO_ERROR;
4538}
4539
4540/*===========================================================================
4541 * FUNCTION : orchestrateResult
4542 *
4543 * DESCRIPTION: Orchestrates a capture result to camera service
4544 *
4545 * PARAMETERS :
 4546 *   @result : capture result to send to camera service
4547 *
4548 * RETURN :
4549 *
4550 *==========================================================================*/
4551void QCamera3HardwareInterface::orchestrateResult(
4552 camera3_capture_result_t *result)
4553{
4554 uint32_t frameworkFrameNumber;
4555 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4556 frameworkFrameNumber);
4557 if (rc != NO_ERROR) {
4558 LOGE("Cannot find translated frameworkFrameNumber");
4559 assert(0);
4560 } else {
4561 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004562            LOGD("Internal request, dropping the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004563 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004564 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004565 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4566 camera_metadata_entry_t entry;
4567 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4568 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004569 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004570 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4571 if (ret != OK)
4572 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004573 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004574 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004575 result->frame_number = frameworkFrameNumber;
4576 mCallbackOps->process_capture_result(mCallbackOps, result);
4577 }
4578 }
4579}
4580
4581/*===========================================================================
4582 * FUNCTION : orchestrateNotify
4583 *
4584 * DESCRIPTION: Orchestrates a notify to camera service
4585 *
4586 * PARAMETERS :
 4587 *   @notify_msg : notify message to send to camera service
4588 *
4589 * RETURN :
4590 *
4591 *==========================================================================*/
4592void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4593{
4594 uint32_t frameworkFrameNumber;
4595 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004596 int32_t rc = NO_ERROR;
4597
4598 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004599 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004600
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 if (rc != NO_ERROR) {
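        // A device-level error is not tied to a translatable frame number, so
        // report it to the framework with frame number 0.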
Thierry Strudel2896d122017-02-23 19:18:03 -08004602 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4603 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4604 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004605 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004606 LOGE("Cannot find translated frameworkFrameNumber");
4607 assert(0);
4608 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004609 }
4610 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004611
4612 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
 4613        LOGD("Internal request, dropping the notifyCb");
4614 } else {
4615 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4616 mCallbackOps->notify(mCallbackOps, notify_msg);
4617 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004618}
4619
4620/*===========================================================================
4621 * FUNCTION : FrameNumberRegistry
4622 *
4623 * DESCRIPTION: Constructor
4624 *
4625 * PARAMETERS :
4626 *
4627 * RETURN :
4628 *
4629 *==========================================================================*/
4630FrameNumberRegistry::FrameNumberRegistry()
4631{
4632 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4633}
4634
4635/*===========================================================================
4636 * FUNCTION : ~FrameNumberRegistry
4637 *
4638 * DESCRIPTION: Destructor
4639 *
4640 * PARAMETERS :
4641 *
4642 * RETURN :
4643 *
4644 *==========================================================================*/
4645FrameNumberRegistry::~FrameNumberRegistry()
4646{
4647}
4648
4649/*===========================================================================
4650 * FUNCTION : PurgeOldEntriesLocked
4651 *
 4652 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4653 *
4654 * PARAMETERS :
4655 *
4656 * RETURN : NONE
4657 *
4658 *==========================================================================*/
4659void FrameNumberRegistry::purgeOldEntriesLocked()
4660{
4661 while (_register.begin() != _register.end()) {
4662 auto itr = _register.begin();
4663 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4664 _register.erase(itr);
4665 } else {
4666 return;
4667 }
4668 }
4669}
4670
4671/*===========================================================================
4672 * FUNCTION : allocStoreInternalFrameNumber
4673 *
4674 * DESCRIPTION: Method to note down a framework request and associate a new
4675 * internal request number against it
4676 *
4677 * PARAMETERS :
4678 * @fFrameNumber: Identifier given by framework
4679 * @internalFN : Output parameter which will have the newly generated internal
4680 * entry
4681 *
4682 * RETURN : Error code
4683 *
4684 *==========================================================================*/
4685int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4686 uint32_t &internalFrameNumber)
4687{
4688 Mutex::Autolock lock(mRegistryLock);
4689 internalFrameNumber = _nextFreeInternalNumber++;
4690 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4691 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4692 purgeOldEntriesLocked();
4693 return NO_ERROR;
4694}
4695
4696/*===========================================================================
4697 * FUNCTION : generateStoreInternalFrameNumber
4698 *
4699 * DESCRIPTION: Method to associate a new internal request number independent
 4700 *              of any association with framework requests
4701 *
4702 * PARAMETERS :
 4703 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
4704 *
4705 *
4706 * RETURN : Error code
4707 *
4708 *==========================================================================*/
4709int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4710{
4711 Mutex::Autolock lock(mRegistryLock);
4712 internalFrameNumber = _nextFreeInternalNumber++;
4713 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4714 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4715 purgeOldEntriesLocked();
4716 return NO_ERROR;
4717}
4718
4719/*===========================================================================
4720 * FUNCTION : getFrameworkFrameNumber
4721 *
4722 * DESCRIPTION: Method to query the framework framenumber given an internal #
4723 *
4724 * PARAMETERS :
4725 * @internalFrame#: Internal reference
4726 * @frameworkframenumber: Output parameter holding framework frame entry
4727 *
4728 * RETURN : Error code
4729 *
4730 *==========================================================================*/
4731int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4732 uint32_t &frameworkFrameNumber)
4733{
4734 Mutex::Autolock lock(mRegistryLock);
4735 auto itr = _register.find(internalFrameNumber);
4736 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004737 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004738 return -ENOENT;
4739 }
4740
4741 frameworkFrameNumber = itr->second;
4742 purgeOldEntriesLocked();
4743 return NO_ERROR;
4744}
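
/* Illustrative sketch (not invoked from here) of how the registry is used by the
 * orchestration code above: an internal number is allocated per framework request,
 * and translated back before results are delivered to the framework.
 *
 *   uint32_t internalFrameNumber;
 *   _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
 *   request->frame_number = internalFrameNumber;
 *   ...
 *   uint32_t frameworkFrameNumber;
 *   if (_orchestrationDb.getFrameworkFrameNumber(result->frame_number,
 *           frameworkFrameNumber) == NO_ERROR &&
 *           frameworkFrameNumber != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       result->frame_number = frameworkFrameNumber;
 *       mCallbackOps->process_capture_result(mCallbackOps, result);
 *   }
 */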
Thierry Strudel3d639192016-09-09 11:52:26 -07004745
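/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fill an HDR+ (pbcamera) stream configuration from a stream that
 *              belongs to the given channel.
 *
 * PARAMETERS :
 *   @config         : Stream configuration to fill
 *   @pbStreamId     : HDR+ stream id to assign
 *   @pbStreamFormat : HDR+ stream pixel format
 *   @channel        : Channel owning the stream
 *   @streamIndex    : Index of the stream within the channel
 *
 * RETURN     : Error status code
 *
 *==========================================================================*/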
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004746status_t QCamera3HardwareInterface::fillPbStreamConfig(
4747 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4748 QCamera3Channel *channel, uint32_t streamIndex) {
4749 if (config == nullptr) {
4750 LOGE("%s: config is null", __FUNCTION__);
4751 return BAD_VALUE;
4752 }
4753
4754 if (channel == nullptr) {
4755 LOGE("%s: channel is null", __FUNCTION__);
4756 return BAD_VALUE;
4757 }
4758
4759 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4760 if (stream == nullptr) {
4761 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4762 return NAME_NOT_FOUND;
4763 }
4764
4765 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4766 if (streamInfo == nullptr) {
4767 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4768 return NAME_NOT_FOUND;
4769 }
4770
4771 config->id = pbStreamId;
4772 config->image.width = streamInfo->dim.width;
4773 config->image.height = streamInfo->dim.height;
4774 config->image.padding = 0;
4775 config->image.format = pbStreamFormat;
4776
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004777 uint32_t totalPlaneSize = 0;
4778
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004779 // Fill plane information.
4780 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4781 pbcamera::PlaneConfiguration plane;
4782 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4783 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4784 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004785
4786 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 }
4788
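    // Report whatever the frame length exceeds the sum of the plane sizes as padding.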
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004789 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004790 return OK;
4791}
4792
Thierry Strudel3d639192016-09-09 11:52:26 -07004793/*===========================================================================
4794 * FUNCTION : processCaptureRequest
4795 *
4796 * DESCRIPTION: process a capture request from camera service
4797 *
4798 * PARAMETERS :
4799 * @request : request from framework to process
4800 *
4801 * RETURN :
4802 *
4803 *==========================================================================*/
4804int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004805 camera3_capture_request_t *request,
4806 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004807{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004808 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 int rc = NO_ERROR;
4810 int32_t request_id;
4811 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004812 bool isVidBufRequested = false;
4813 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004814 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004815
4816 pthread_mutex_lock(&mMutex);
4817
4818 // Validate current state
4819 switch (mState) {
4820 case CONFIGURED:
4821 case STARTED:
4822 /* valid state */
4823 break;
4824
4825 case ERROR:
4826 pthread_mutex_unlock(&mMutex);
4827 handleCameraDeviceError();
4828 return -ENODEV;
4829
4830 default:
4831 LOGE("Invalid state %d", mState);
4832 pthread_mutex_unlock(&mMutex);
4833 return -ENODEV;
4834 }
4835
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004836 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 if (rc != NO_ERROR) {
4838 LOGE("incoming request is not valid");
4839 pthread_mutex_unlock(&mMutex);
4840 return rc;
4841 }
4842
4843 meta = request->settings;
4844
4845 // For first capture request, send capture intent, and
4846 // stream on all streams
4847 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004848 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 // send an unconfigure to the backend so that the isp
4850 // resources are deallocated
4851 if (!mFirstConfiguration) {
4852 cam_stream_size_info_t stream_config_info;
4853 int32_t hal_version = CAM_HAL_V3;
4854 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4855 stream_config_info.buffer_info.min_buffers =
4856 MIN_INFLIGHT_REQUESTS;
4857 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004858 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004859 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004860 clear_metadata_buffer(mParameters);
4861 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4862 CAM_INTF_PARM_HAL_VERSION, hal_version);
4863 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4864 CAM_INTF_META_STREAM_INFO, stream_config_info);
4865 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4866 mParameters);
4867 if (rc < 0) {
4868 LOGE("set_parms for unconfigure failed");
4869 pthread_mutex_unlock(&mMutex);
4870 return rc;
4871 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004872
Thierry Strudel3d639192016-09-09 11:52:26 -07004873 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004874 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004875 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004876 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 property_get("persist.camera.is_type", is_type_value, "4");
4879 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4880 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4881 property_get("persist.camera.is_type_preview", is_type_value, "4");
4882 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4883 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004884
4885 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4886 int32_t hal_version = CAM_HAL_V3;
4887 uint8_t captureIntent =
4888 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4889 mCaptureIntent = captureIntent;
4890 clear_metadata_buffer(mParameters);
4891 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4892 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4893 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004894 if (mFirstConfiguration) {
4895 // configure instant AEC
4896 // Instant AEC is a session based parameter and it is needed only
4897 // once per complete session after open camera.
4898 // i.e. This is set only once for the first capture request, after open camera.
4899 setInstantAEC(meta);
4900 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004901 uint8_t fwkVideoStabMode=0;
4902 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4903 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4904 }
4905
Xue Tuecac74e2017-04-17 13:58:15 -07004906        // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4907 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004908 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 int32_t vsMode;
4910 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4911 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4912 rc = BAD_VALUE;
4913 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914 LOGD("setEis %d", setEis);
4915 bool eis3Supported = false;
4916 size_t count = IS_TYPE_MAX;
4917 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4918 for (size_t i = 0; i < count; i++) {
4919 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4920 eis3Supported = true;
4921 break;
4922 }
4923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004924
4925 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004926 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4928 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004929 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4930 is_type = isTypePreview;
4931 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4932 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4933 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004934 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004935 } else {
4936 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004937 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004938 } else {
4939 is_type = IS_TYPE_NONE;
4940 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004943 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4944 }
4945 }
4946
4947 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4948 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4949
Thierry Strudel54dc9782017-02-15 12:12:10 -08004950 //Disable tintless only if the property is set to 0
4951 memset(prop, 0, sizeof(prop));
4952 property_get("persist.camera.tintless.enable", prop, "1");
4953 int32_t tintless_value = atoi(prop);
4954
Thierry Strudel3d639192016-09-09 11:52:26 -07004955 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4956 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004957
Thierry Strudel3d639192016-09-09 11:52:26 -07004958 //Disable CDS for HFR mode or if DIS/EIS is on.
4959 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4960 //after every configure_stream
4961 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4962 (m_bIsVideo)) {
4963 int32_t cds = CAM_CDS_MODE_OFF;
4964 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4965 CAM_INTF_PARM_CDS_MODE, cds))
4966 LOGE("Failed to disable CDS for HFR mode");
4967
4968 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969
4970 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4971 uint8_t* use_av_timer = NULL;
4972
4973 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004974 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004975 use_av_timer = &m_debug_avtimer;
4976 }
4977 else{
4978 use_av_timer =
4979 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004980 if (use_av_timer) {
4981 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4982 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 }
4984
4985 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4986 rc = BAD_VALUE;
4987 }
4988 }
4989
Thierry Strudel3d639192016-09-09 11:52:26 -07004990 setMobicat();
4991
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004992 uint8_t nrMode = 0;
4993 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4994 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4995 }
4996
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 /* Set fps and hfr mode while sending meta stream info so that sensor
4998 * can configure appropriate streaming mode */
4999 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005000 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5001 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005002 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5003 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005004 if (rc == NO_ERROR) {
5005 int32_t max_fps =
5006 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005007 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005008 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5009 }
5010 /* For HFR, more buffers are dequeued upfront to improve the performance */
5011 if (mBatchSize) {
5012 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5013 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5014 }
5015 }
5016 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 LOGE("setHalFpsRange failed");
5018 }
5019 }
5020 if (meta.exists(ANDROID_CONTROL_MODE)) {
5021 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5022 rc = extractSceneMode(meta, metaMode, mParameters);
5023 if (rc != NO_ERROR) {
5024 LOGE("extractSceneMode failed");
5025 }
5026 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005027 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005028
Thierry Strudel04e026f2016-10-10 11:27:36 -07005029 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5030 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5031 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5032 rc = setVideoHdrMode(mParameters, vhdr);
5033 if (rc != NO_ERROR) {
5034 LOGE("setVideoHDR is failed");
5035 }
5036 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005037
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005038 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005039 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005040 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005041 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5042 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5043 sensorModeFullFov)) {
5044 rc = BAD_VALUE;
5045 }
5046 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005047 //TODO: validate the arguments, HSV scenemode should have only the
5048 //advertised fps ranges
5049
5050 /*set the capture intent, hal version, tintless, stream info,
 5051         *and DIS enable parameters to the backend */
5052 LOGD("set_parms META_STREAM_INFO " );
5053 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005054 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5055 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 mStreamConfigInfo.type[i],
5057 mStreamConfigInfo.stream_sizes[i].width,
5058 mStreamConfigInfo.stream_sizes[i].height,
5059 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005060 mStreamConfigInfo.format[i],
5061 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005062 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005063
Thierry Strudel3d639192016-09-09 11:52:26 -07005064 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5065 mParameters);
5066 if (rc < 0) {
5067 LOGE("set_parms failed for hal version, stream info");
5068 }
5069
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005070 cam_sensor_mode_info_t sensorModeInfo = {};
5071 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 if (rc != NO_ERROR) {
 5073            LOGE("Failed to get sensor mode info");
5074 pthread_mutex_unlock(&mMutex);
5075 goto error_exit;
5076 }
5077
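        // Set up crop region translation between the full active pixel array and
        // the active array of the selected sensor mode.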
5078 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5079 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005080 sensorModeInfo.active_array_size.width,
5081 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005082
5083 /* Set batchmode before initializing channel. Since registerBuffer
5084 * internally initializes some of the channels, better set batchmode
5085 * even before first register buffer */
5086 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5087 it != mStreamInfo.end(); it++) {
5088 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5089 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5090 && mBatchSize) {
5091 rc = channel->setBatchSize(mBatchSize);
5092 //Disable per frame map unmap for HFR/batchmode case
5093 rc |= channel->setPerFrameMapUnmap(false);
5094 if (NO_ERROR != rc) {
5095 LOGE("Channel init failed %d", rc);
5096 pthread_mutex_unlock(&mMutex);
5097 goto error_exit;
5098 }
5099 }
5100 }
5101
5102 //First initialize all streams
5103 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5104 it != mStreamInfo.end(); it++) {
5105 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005106
5107 /* Initial value of NR mode is needed before stream on */
5108 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5110 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005111 setEis) {
5112 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5113 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5114 is_type = mStreamConfigInfo.is_type[i];
5115 break;
5116 }
5117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = channel->initialize(IS_TYPE_NONE);
5121 }
5122 if (NO_ERROR != rc) {
5123 LOGE("Channel initialization failed %d", rc);
5124 pthread_mutex_unlock(&mMutex);
5125 goto error_exit;
5126 }
5127 }
5128
5129 if (mRawDumpChannel) {
5130 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5131 if (rc != NO_ERROR) {
5132 LOGE("Error: Raw Dump Channel init failed");
5133 pthread_mutex_unlock(&mMutex);
5134 goto error_exit;
5135 }
5136 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005137 if (mHdrPlusRawSrcChannel) {
5138 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5139 if (rc != NO_ERROR) {
5140 LOGE("Error: HDR+ RAW Source Channel init failed");
5141 pthread_mutex_unlock(&mMutex);
5142 goto error_exit;
5143 }
5144 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005145 if (mSupportChannel) {
5146 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5147 if (rc < 0) {
5148 LOGE("Support channel initialization failed");
5149 pthread_mutex_unlock(&mMutex);
5150 goto error_exit;
5151 }
5152 }
5153 if (mAnalysisChannel) {
5154 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5155 if (rc < 0) {
5156 LOGE("Analysis channel initialization failed");
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160 }
5161 if (mDummyBatchChannel) {
5162 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5163 if (rc < 0) {
5164 LOGE("mDummyBatchChannel setBatchSize failed");
5165 pthread_mutex_unlock(&mMutex);
5166 goto error_exit;
5167 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005168 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 if (rc < 0) {
5170 LOGE("mDummyBatchChannel initialization failed");
5171 pthread_mutex_unlock(&mMutex);
5172 goto error_exit;
5173 }
5174 }
5175
5176 // Set bundle info
5177 rc = setBundleInfo();
5178 if (rc < 0) {
5179 LOGE("setBundleInfo failed %d", rc);
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183
5184 //update settings from app here
5185 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5186 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5187 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5188 }
5189 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5190 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5191 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5192 }
5193 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5194 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5195 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5196
5197 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5198 (mLinkedCameraId != mCameraId) ) {
5199 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5200 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005201 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 goto error_exit;
5203 }
5204 }
5205
5206 // add bundle related cameras
5207 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5208 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005209 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5210 &m_pDualCamCmdPtr->bundle_info;
5211 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005212 if (mIsDeviceLinked)
5213 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5214 else
5215 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5216
5217 pthread_mutex_lock(&gCamLock);
5218
5219 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5220 LOGE("Dualcam: Invalid Session Id ");
5221 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005222 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 goto error_exit;
5224 }
5225
5226 if (mIsMainCamera == 1) {
5227 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5228 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005229 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005230 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 // related session id should be session id of linked session
5232 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5233 } else {
5234 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5235 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005236 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005237 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005238 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5239 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005240 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005241 pthread_mutex_unlock(&gCamLock);
5242
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005243 rc = mCameraHandle->ops->set_dual_cam_cmd(
5244 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 if (rc < 0) {
5246 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005247 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005248 goto error_exit;
5249 }
5250 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005251 goto no_error;
5252error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005253 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005254 return rc;
5255no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005256 mWokenUpByDaemon = false;
5257 mPendingLiveRequest = 0;
5258 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 }
5260
5261 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005262 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005263
5264 if (mFlushPerf) {
5265 //we cannot accept any requests during flush
5266 LOGE("process_capture_request cannot proceed during flush");
5267 pthread_mutex_unlock(&mMutex);
5268 return NO_ERROR; //should return an error
5269 }
5270
5271 if (meta.exists(ANDROID_REQUEST_ID)) {
5272 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5273 mCurrentRequestId = request_id;
5274 LOGD("Received request with id: %d", request_id);
5275 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5276 LOGE("Unable to find request id field, \
5277 & no previous id available");
5278 pthread_mutex_unlock(&mMutex);
5279 return NAME_NOT_FOUND;
5280 } else {
5281 LOGD("Re-using old request id");
5282 request_id = mCurrentRequestId;
5283 }
5284
5285 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5286 request->num_output_buffers,
5287 request->input_buffer,
5288 frameNumber);
5289 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005292 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 uint32_t snapshotStreamId = 0;
5294 for (size_t i = 0; i < request->num_output_buffers; i++) {
5295 const camera3_stream_buffer_t& output = request->output_buffers[i];
5296 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5297
Emilian Peev7650c122017-01-19 08:24:33 -08005298 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5299 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005300 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 blob_request = 1;
5302 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5303 }
5304
5305 if (output.acquire_fence != -1) {
5306 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5307 close(output.acquire_fence);
5308 if (rc != OK) {
5309 LOGE("sync wait failed %d", rc);
5310 pthread_mutex_unlock(&mMutex);
5311 return rc;
5312 }
5313 }
5314
Emilian Peev0f3c3162017-03-15 12:57:46 +00005315 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5316 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005317 depthRequestPresent = true;
5318 continue;
5319 }
5320
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005321 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005322 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005323
5324 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5325 isVidBufRequested = true;
5326 }
5327 }
5328
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005329    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5330 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5331 itr++) {
5332 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5333 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5334 channel->getStreamID(channel->getStreamTypeMask());
5335
5336 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5337 isVidBufRequested = true;
5338 }
5339 }
5340
Thierry Strudel3d639192016-09-09 11:52:26 -07005341 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005342 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005343 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005344 }
5345 if (blob_request && mRawDumpChannel) {
5346 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005347 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005348 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005349 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 }
5351
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352 {
5353 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5354 // Request a RAW buffer if
5355 // 1. mHdrPlusRawSrcChannel is valid.
                5356        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate.)
5357 // 3. There is no pending HDR+ request.
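                            // For example, if kHdrPlusRawPeriod were 4 (an illustrative value only;
                            // see its actual definition), only frame numbers 0, 4, 8, ... would add
                            // the HDR+ RAW stream here, and only while no HDR+ request is pending.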
5358 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5359 mHdrPlusPendingRequests.size() == 0) {
5360 streamsArray.stream_request[streamsArray.num_streams].streamID =
5361 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5362 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5363 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005364 }
5365
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005366 //extract capture intent
5367 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5368 mCaptureIntent =
5369 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5370 }
5371
5372 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5373 mCacMode =
5374 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5375 }
5376
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005377 uint8_t requestedLensShadingMapMode;
5378 // Get the shading map mode.
5379 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5380 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5381 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5382 } else {
5383 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5384 }
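                        // Caching the last explicitly requested mode makes the shading map mode
                        // sticky: requests that omit ANDROID_STATISTICS_LENS_SHADING_MAP_MODE
                        // inherit the previously requested setting.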
5385
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005386 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005387 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005388
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005389 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005390 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005391 // If this request has a still capture intent, try to submit an HDR+ request.
5392 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5393 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5394 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5395 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005396 }
5397
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005398 if (hdrPlusRequest) {
5399 // For a HDR+ request, just set the frame parameters.
5400 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5401 if (rc < 0) {
5402 LOGE("fail to set frame parameters");
5403 pthread_mutex_unlock(&mMutex);
5404 return rc;
5405 }
5406 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 /* Parse the settings:
5408 * - For every request in NORMAL MODE
5409 * - For every request in HFR mode during preview only case
5410 * - For first request of every batch in HFR mode during video
                5411         * recording. In batchmode the same settings, except the frame number,
                5412         * are repeated in each request of the batch.
5413 */
5414 if (!mBatchSize ||
5415 (mBatchSize && !isVidBufRequested) ||
5416 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005417 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005418 if (rc < 0) {
5419 LOGE("fail to set frame parameters");
5420 pthread_mutex_unlock(&mMutex);
5421 return rc;
5422 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005423
5424 {
5425 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5426 // will be reported in result metadata.
5427 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5428 if (mHdrPlusModeEnabled) {
5429 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5430 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5431 }
5432 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005433 }
5434 /* For batchMode HFR, setFrameParameters is not called for every
5435 * request. But only frame number of the latest request is parsed.
5436 * Keep track of first and last frame numbers in a batch so that
5437 * metadata for the frame numbers of batch can be duplicated in
                5438      * handleBatchMetadata */
5439 if (mBatchSize) {
5440 if (!mToBeQueuedVidBufs) {
5441 //start of the batch
5442 mFirstFrameNumberInBatch = request->frame_number;
5443 }
5444 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5445 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5446 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005447 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005448 return BAD_VALUE;
5449 }
5450 }
5451 if (mNeedSensorRestart) {
5452 /* Unlock the mutex as restartSensor waits on the channels to be
5453 * stopped, which in turn calls stream callback functions -
5454 * handleBufferWithLock and handleMetadataWithLock */
5455 pthread_mutex_unlock(&mMutex);
5456 rc = dynamicUpdateMetaStreamInfo();
5457 if (rc != NO_ERROR) {
5458 LOGE("Restarting the sensor failed");
5459 return BAD_VALUE;
5460 }
5461 mNeedSensorRestart = false;
5462 pthread_mutex_lock(&mMutex);
5463 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005464 if(mResetInstantAEC) {
5465 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5466 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5467 mResetInstantAEC = false;
5468 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005469 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005470 if (request->input_buffer->acquire_fence != -1) {
5471 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5472 close(request->input_buffer->acquire_fence);
5473 if (rc != OK) {
5474 LOGE("input buffer sync wait failed %d", rc);
5475 pthread_mutex_unlock(&mMutex);
5476 return rc;
5477 }
5478 }
5479 }
5480
5481 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5482 mLastCustIntentFrmNum = frameNumber;
5483 }
5484 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005485 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005486 pendingRequestIterator latestRequest;
5487 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005488 pendingRequest.num_buffers = depthRequestPresent ?
5489 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005490 pendingRequest.request_id = request_id;
5491 pendingRequest.blob_request = blob_request;
5492 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005493 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 if (request->input_buffer) {
5495 pendingRequest.input_buffer =
5496 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5497 *(pendingRequest.input_buffer) = *(request->input_buffer);
5498 pInputBuffer = pendingRequest.input_buffer;
5499 } else {
5500 pendingRequest.input_buffer = NULL;
5501 pInputBuffer = NULL;
5502 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005503 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005504
5505 pendingRequest.pipeline_depth = 0;
5506 pendingRequest.partial_result_cnt = 0;
5507 extractJpegMetadata(mCurJpegMeta, request);
5508 pendingRequest.jpegMetadata = mCurJpegMeta;
5509 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005510 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005511 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5512 mHybridAeEnable =
5513 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5514 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005515
5516 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5517 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005518 /* DevCamDebug metadata processCaptureRequest */
5519 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5520 mDevCamDebugMetaEnable =
5521 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5522 }
5523 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5524 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005525
5526 //extract CAC info
5527 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5528 mCacMode =
5529 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5530 }
5531 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005532 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005533 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5534 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005535
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005536 // extract enableZsl info
5537 if (gExposeEnableZslKey) {
5538 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5539 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5540 mZslEnabled = pendingRequest.enableZsl;
5541 } else {
5542 pendingRequest.enableZsl = mZslEnabled;
5543 }
5544 }
5545
Thierry Strudel3d639192016-09-09 11:52:26 -07005546 PendingBuffersInRequest bufsForCurRequest;
5547 bufsForCurRequest.frame_number = frameNumber;
5548 // Mark current timestamp for the new request
5549 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005550 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005551
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005552 if (hdrPlusRequest) {
5553 // Save settings for this request.
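                            // A full copy of metadata_buffer_t is kept so that later updates to
                            // mParameters for subsequent requests do not modify this queued
                            // HDR+ request.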
5554 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5555 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5556
5557 // Add to pending HDR+ request queue.
5558 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5559 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5560
5561 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5562 }
5563
Thierry Strudel3d639192016-09-09 11:52:26 -07005564 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005565 if ((request->output_buffers[i].stream->data_space ==
5566 HAL_DATASPACE_DEPTH) &&
5567 (HAL_PIXEL_FORMAT_BLOB ==
5568 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005569 continue;
5570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005571 RequestedBufferInfo requestedBuf;
5572 memset(&requestedBuf, 0, sizeof(requestedBuf));
5573 requestedBuf.stream = request->output_buffers[i].stream;
5574 requestedBuf.buffer = NULL;
5575 pendingRequest.buffers.push_back(requestedBuf);
5576
                5577        // Add the buffer handle to the pending buffers list
5578 PendingBufferInfo bufferInfo;
5579 bufferInfo.buffer = request->output_buffers[i].buffer;
5580 bufferInfo.stream = request->output_buffers[i].stream;
5581 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5582 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5583 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5584 frameNumber, bufferInfo.buffer,
5585 channel->getStreamTypeMask(), bufferInfo.stream->format);
5586 }
5587 // Add this request packet into mPendingBuffersMap
5588 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5589 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5590 mPendingBuffersMap.get_num_overall_buffers());
5591
5592 latestRequest = mPendingRequestsList.insert(
5593 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005594
5595 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5596 // for the frame number.
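                        // Registering the expectations up front lets the dispatchers return
                        // shutter notifications and output buffers to the framework in
                        // frame-number order (later frames are presumably held back until this
                        // frame's results are ready).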
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005597 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005598 for (size_t i = 0; i < request->num_output_buffers; i++) {
5599 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5600 }
5601
Thierry Strudel3d639192016-09-09 11:52:26 -07005602 if(mFlush) {
5603 LOGI("mFlush is true");
5604 pthread_mutex_unlock(&mMutex);
5605 return NO_ERROR;
5606 }
5607
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5609 // channel.
5610 if (!hdrPlusRequest) {
5611 int indexUsed;
5612 // Notify metadata channel we receive a request
5613 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005614
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615 if(request->input_buffer != NULL){
5616 LOGD("Input request, frame_number %d", frameNumber);
5617 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5618 if (NO_ERROR != rc) {
5619 LOGE("fail to set reproc parameters");
5620 pthread_mutex_unlock(&mMutex);
5621 return rc;
5622 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005623 }
5624
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005625 // Call request on other streams
5626 uint32_t streams_need_metadata = 0;
5627 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5628 for (size_t i = 0; i < request->num_output_buffers; i++) {
5629 const camera3_stream_buffer_t& output = request->output_buffers[i];
5630 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5631
5632 if (channel == NULL) {
5633 LOGW("invalid channel pointer for stream");
5634 continue;
5635 }
5636
5637 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5638 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5639 output.buffer, request->input_buffer, frameNumber);
5640 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005641 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005642 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5643 if (rc < 0) {
5644 LOGE("Fail to request on picture channel");
5645 pthread_mutex_unlock(&mMutex);
5646 return rc;
5647 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005648 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005649 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5650 assert(NULL != mDepthChannel);
5651 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005652
Emilian Peev7650c122017-01-19 08:24:33 -08005653 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5654 if (rc < 0) {
5655 LOGE("Fail to map on depth buffer");
5656 pthread_mutex_unlock(&mMutex);
5657 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005658 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005659 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005660 } else {
5661 LOGD("snapshot request with buffer %p, frame_number %d",
5662 output.buffer, frameNumber);
5663 if (!request->settings) {
5664 rc = channel->request(output.buffer, frameNumber,
5665 NULL, mPrevParameters, indexUsed);
5666 } else {
5667 rc = channel->request(output.buffer, frameNumber,
5668 NULL, mParameters, indexUsed);
5669 }
5670 if (rc < 0) {
5671 LOGE("Fail to request on picture channel");
5672 pthread_mutex_unlock(&mMutex);
5673 return rc;
5674 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005675
Emilian Peev7650c122017-01-19 08:24:33 -08005676 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5677 uint32_t j = 0;
5678 for (j = 0; j < streamsArray.num_streams; j++) {
5679 if (streamsArray.stream_request[j].streamID == streamId) {
5680 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5681 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5682 else
5683 streamsArray.stream_request[j].buf_index = indexUsed;
5684 break;
5685 }
5686 }
5687 if (j == streamsArray.num_streams) {
5688 LOGE("Did not find matching stream to update index");
5689 assert(0);
5690 }
5691
5692 pendingBufferIter->need_metadata = true;
5693 streams_need_metadata++;
5694 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005695 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005696 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5697 bool needMetadata = false;
5698 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5699 rc = yuvChannel->request(output.buffer, frameNumber,
5700 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5701 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005702 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005703 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005704 pthread_mutex_unlock(&mMutex);
5705 return rc;
5706 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005707
5708 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5709 uint32_t j = 0;
5710 for (j = 0; j < streamsArray.num_streams; j++) {
5711 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005712 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5713 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5714 else
5715 streamsArray.stream_request[j].buf_index = indexUsed;
5716 break;
5717 }
5718 }
5719 if (j == streamsArray.num_streams) {
5720 LOGE("Did not find matching stream to update index");
5721 assert(0);
5722 }
5723
5724 pendingBufferIter->need_metadata = needMetadata;
5725 if (needMetadata)
5726 streams_need_metadata += 1;
5727 LOGD("calling YUV channel request, need_metadata is %d",
5728 needMetadata);
5729 } else {
5730 LOGD("request with buffer %p, frame_number %d",
5731 output.buffer, frameNumber);
5732
5733 rc = channel->request(output.buffer, frameNumber, indexUsed);
5734
5735 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5736 uint32_t j = 0;
5737 for (j = 0; j < streamsArray.num_streams; j++) {
5738 if (streamsArray.stream_request[j].streamID == streamId) {
5739 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5740 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5741 else
5742 streamsArray.stream_request[j].buf_index = indexUsed;
5743 break;
5744 }
5745 }
5746 if (j == streamsArray.num_streams) {
5747 LOGE("Did not find matching stream to update index");
5748 assert(0);
5749 }
5750
5751 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5752 && mBatchSize) {
5753 mToBeQueuedVidBufs++;
5754 if (mToBeQueuedVidBufs == mBatchSize) {
5755 channel->queueBatchBuf();
5756 }
5757 }
5758 if (rc < 0) {
5759 LOGE("request failed");
5760 pthread_mutex_unlock(&mMutex);
5761 return rc;
5762 }
5763 }
5764 pendingBufferIter++;
5765 }
5766
5767 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5768 itr++) {
5769 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5770
5771 if (channel == NULL) {
5772 LOGE("invalid channel pointer for stream");
5773 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005774 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005775 return BAD_VALUE;
5776 }
5777
5778 InternalRequest requestedStream;
5779 requestedStream = (*itr);
5780
5781
5782 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5783 LOGD("snapshot request internally input buffer %p, frame_number %d",
5784 request->input_buffer, frameNumber);
5785 if(request->input_buffer != NULL){
5786 rc = channel->request(NULL, frameNumber,
5787 pInputBuffer, &mReprocMeta, indexUsed, true,
5788 requestedStream.meteringOnly);
5789 if (rc < 0) {
5790 LOGE("Fail to request on picture channel");
5791 pthread_mutex_unlock(&mMutex);
5792 return rc;
5793 }
5794 } else {
5795 LOGD("snapshot request with frame_number %d", frameNumber);
5796 if (!request->settings) {
5797 rc = channel->request(NULL, frameNumber,
5798 NULL, mPrevParameters, indexUsed, true,
5799 requestedStream.meteringOnly);
5800 } else {
5801 rc = channel->request(NULL, frameNumber,
5802 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5803 }
5804 if (rc < 0) {
5805 LOGE("Fail to request on picture channel");
5806 pthread_mutex_unlock(&mMutex);
5807 return rc;
5808 }
5809
5810 if ((*itr).meteringOnly != 1) {
5811 requestedStream.need_metadata = 1;
5812 streams_need_metadata++;
5813 }
5814 }
5815
5816 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5817 uint32_t j = 0;
5818 for (j = 0; j < streamsArray.num_streams; j++) {
5819 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005820 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5821 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5822 else
5823 streamsArray.stream_request[j].buf_index = indexUsed;
5824 break;
5825 }
5826 }
5827 if (j == streamsArray.num_streams) {
5828 LOGE("Did not find matching stream to update index");
5829 assert(0);
5830 }
5831
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005832 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005833 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005834 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005835 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005836 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005837 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005838 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005839 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005840
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005841 //If 2 streams have need_metadata set to true, fail the request, unless
5842 //we copy/reference count the metadata buffer
5843 if (streams_need_metadata > 1) {
                5844        LOGE("not supporting request in which two streams require"
5845 " 2 HAL metadata for reprocessing");
5846 pthread_mutex_unlock(&mMutex);
5847 return -EINVAL;
5848 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005849
Emilian Peev656e4fa2017-06-02 16:47:04 +01005850 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5851 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5852 if (depthRequestPresent && mDepthChannel) {
5853 if (request->settings) {
5854 camera_metadata_ro_entry entry;
5855 if (find_camera_metadata_ro_entry(request->settings,
5856 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5857 if (entry.data.u8[0]) {
5858 pdafEnable = CAM_PD_DATA_ENABLED;
5859 } else {
5860 pdafEnable = CAM_PD_DATA_SKIP;
5861 }
5862 mDepthCloudMode = pdafEnable;
5863 } else {
5864 pdafEnable = mDepthCloudMode;
5865 }
5866 } else {
5867 pdafEnable = mDepthCloudMode;
5868 }
5869 }
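                        // Resulting PDAF states: DISABLED when no depth channel is configured,
                        // SKIP when a depth channel exists but this request does not enable
                        // depth data, and ENABLED when the request includes a depth buffer and
                        // NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE is set in the settings.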
5870
Emilian Peev7650c122017-01-19 08:24:33 -08005871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5872 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5873 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5874 pthread_mutex_unlock(&mMutex);
5875 return BAD_VALUE;
5876 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005877
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005878 if (request->input_buffer == NULL) {
5879 /* Set the parameters to backend:
5880 * - For every request in NORMAL MODE
5881 * - For every request in HFR mode during preview only case
5882 * - Once every batch in HFR mode during video recording
5883 */
5884 if (!mBatchSize ||
5885 (mBatchSize && !isVidBufRequested) ||
5886 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5887 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5888 mBatchSize, isVidBufRequested,
5889 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005890
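                                // Merge this request's streams into the accumulated batch list so
                                // that the single set_parms issued for the batch covers every
                                // stream requested by any frame in the batch (duplicates skipped).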
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5892 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5893 uint32_t m = 0;
5894 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5895 if (streamsArray.stream_request[k].streamID ==
5896 mBatchedStreamsArray.stream_request[m].streamID)
5897 break;
5898 }
5899 if (m == mBatchedStreamsArray.num_streams) {
5900 mBatchedStreamsArray.stream_request\
5901 [mBatchedStreamsArray.num_streams].streamID =
5902 streamsArray.stream_request[k].streamID;
5903 mBatchedStreamsArray.stream_request\
5904 [mBatchedStreamsArray.num_streams].buf_index =
5905 streamsArray.stream_request[k].buf_index;
5906 mBatchedStreamsArray.num_streams =
5907 mBatchedStreamsArray.num_streams + 1;
5908 }
5909 }
5910 streamsArray = mBatchedStreamsArray;
5911 }
5912 /* Update stream id of all the requested buffers */
5913 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5914 streamsArray)) {
                5915                LOGE("Failed to set stream ids in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005916 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005917 return BAD_VALUE;
5918 }
5919
5920 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5921 mParameters);
5922 if (rc < 0) {
5923 LOGE("set_parms failed");
5924 }
                5925            /* reset to zero because the batch is queued */
5926 mToBeQueuedVidBufs = 0;
5927 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5928 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5929 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005930 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5931 uint32_t m = 0;
5932 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5933 if (streamsArray.stream_request[k].streamID ==
5934 mBatchedStreamsArray.stream_request[m].streamID)
5935 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005936 }
5937 if (m == mBatchedStreamsArray.num_streams) {
5938 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5939 streamID = streamsArray.stream_request[k].streamID;
5940 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5941 buf_index = streamsArray.stream_request[k].buf_index;
5942 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5943 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005944 }
5945 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005946 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005947
5948 // Start all streams after the first setting is sent, so that the
5949 // setting can be applied sooner: (0 + apply_delay)th frame.
5950 if (mState == CONFIGURED && mChannelHandle) {
5951 //Then start them.
5952 LOGH("Start META Channel");
5953 rc = mMetadataChannel->start();
5954 if (rc < 0) {
5955 LOGE("META channel start failed");
5956 pthread_mutex_unlock(&mMutex);
5957 return rc;
5958 }
5959
5960 if (mAnalysisChannel) {
5961 rc = mAnalysisChannel->start();
5962 if (rc < 0) {
5963 LOGE("Analysis channel start failed");
5964 mMetadataChannel->stop();
5965 pthread_mutex_unlock(&mMutex);
5966 return rc;
5967 }
5968 }
5969
5970 if (mSupportChannel) {
5971 rc = mSupportChannel->start();
5972 if (rc < 0) {
5973 LOGE("Support channel start failed");
5974 mMetadataChannel->stop();
                5975                        /* Although support and analysis are mutually exclusive today,
                5976                           adding it in any case for future-proofing */
5977 if (mAnalysisChannel) {
5978 mAnalysisChannel->stop();
5979 }
5980 pthread_mutex_unlock(&mMutex);
5981 return rc;
5982 }
5983 }
5984 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5985 it != mStreamInfo.end(); it++) {
5986 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5987 LOGH("Start Processing Channel mask=%d",
5988 channel->getStreamTypeMask());
5989 rc = channel->start();
5990 if (rc < 0) {
5991 LOGE("channel start failed");
5992 pthread_mutex_unlock(&mMutex);
5993 return rc;
5994 }
5995 }
5996
5997 if (mRawDumpChannel) {
5998 LOGD("Starting raw dump stream");
5999 rc = mRawDumpChannel->start();
6000 if (rc != NO_ERROR) {
6001 LOGE("Error Starting Raw Dump Channel");
6002 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6003 it != mStreamInfo.end(); it++) {
6004 QCamera3Channel *channel =
6005 (QCamera3Channel *)(*it)->stream->priv;
6006 LOGH("Stopping Processing Channel mask=%d",
6007 channel->getStreamTypeMask());
6008 channel->stop();
6009 }
6010 if (mSupportChannel)
6011 mSupportChannel->stop();
6012 if (mAnalysisChannel) {
6013 mAnalysisChannel->stop();
6014 }
6015 mMetadataChannel->stop();
6016 pthread_mutex_unlock(&mMutex);
6017 return rc;
6018 }
6019 }
6020
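                                    // Stream-on is sequenced in three steps: configure the ISP
                                    // channels without sensor streaming, bring up Easel/MIPI at
                                    // the selected sensor mode's pixel clock, and only then start
                                    // sensor streaming.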
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006021 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006022 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006023 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006024 if (rc != NO_ERROR) {
6025 LOGE("start_channel failed %d", rc);
6026 pthread_mutex_unlock(&mMutex);
6027 return rc;
6028 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006029
6030 {
6031 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006032 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006033
6034 // Now that sensor mode should have been selected, get the selected sensor mode
6035 // info.
6036 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6037 getCurrentSensorModeInfo(mSensorModeInfo);
6038
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006039 if (EaselManagerClientOpened) {
6040 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006041 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6042 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006043 if (rc != OK) {
6044 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6045 mCameraId, mSensorModeInfo.op_pixel_clk);
6046 pthread_mutex_unlock(&mMutex);
6047 return rc;
6048 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006049 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006050 }
6051 }
6052
6053 // Start sensor streaming.
6054 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6055 mChannelHandle);
6056 if (rc != NO_ERROR) {
6057 LOGE("start_sensor_stream_on failed %d", rc);
6058 pthread_mutex_unlock(&mMutex);
6059 return rc;
6060 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006061 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006062 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006063 }
6064
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006065 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006066 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006067 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006068 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006069 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6070 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6071 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6072 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006073
6074 if (isSessionHdrPlusModeCompatible()) {
6075 rc = enableHdrPlusModeLocked();
6076 if (rc != OK) {
6077 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6078 pthread_mutex_unlock(&mMutex);
6079 return rc;
6080 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006081 }
6082
6083 mFirstPreviewIntentSeen = true;
6084 }
6085 }
6086
Thierry Strudel3d639192016-09-09 11:52:26 -07006087 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6088
6089 mState = STARTED;
6090 // Added a timed condition wait
6091 struct timespec ts;
6092 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006093 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006094 if (rc < 0) {
6095 isValidTimeout = 0;
                6096        LOGE("Error reading the monotonic clock!!");
6097 }
6098 else {
                6099        // Make the timeout 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006100 int64_t timeout = 5;
6101 {
6102 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6103 // If there is a pending HDR+ request, the following requests may be blocked until the
6104 // HDR+ request is done. So allow a longer timeout.
6105 if (mHdrPlusPendingRequests.size() > 0) {
6106 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6107 }
6108 }
6109 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006110 }
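                        // Throttle the caller: block until the number of in-flight requests drops
                        // below the minimum watermark, unless this is a reprocess request with an
                        // input buffer. The wait is timed so that a stalled pipeline surfaces as
                        // -ENODEV instead of hanging the framework's request thread; a pending
                        // HDR+ request extends the timeout as computed above.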
6111 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006112 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006113 (mState != ERROR) && (mState != DEINIT)) {
6114 if (!isValidTimeout) {
6115 LOGD("Blocking on conditional wait");
6116 pthread_cond_wait(&mRequestCond, &mMutex);
6117 }
6118 else {
6119 LOGD("Blocking on timed conditional wait");
6120 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6121 if (rc == ETIMEDOUT) {
6122 rc = -ENODEV;
6123 LOGE("Unblocked on timeout!!!!");
6124 break;
6125 }
6126 }
6127 LOGD("Unblocked");
6128 if (mWokenUpByDaemon) {
6129 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006130 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006131 break;
6132 }
6133 }
6134 pthread_mutex_unlock(&mMutex);
6135
6136 return rc;
6137}
6138
6139/*===========================================================================
6140 * FUNCTION : dump
6141 *
                6142 * DESCRIPTION: Dump the HAL3 internal state (pending requests, pending
                6143 *              buffers and pending frame drops) to the given fd
                6144 * PARAMETERS :
                6145 *   @fd      : file descriptor to write the dump output to
                6146 *
                6147 * RETURN     : None
6148 *==========================================================================*/
6149void QCamera3HardwareInterface::dump(int fd)
6150{
6151 pthread_mutex_lock(&mMutex);
6152 dprintf(fd, "\n Camera HAL3 information Begin \n");
6153
6154 dprintf(fd, "\nNumber of pending requests: %zu \n",
6155 mPendingRequestsList.size());
6156 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6157 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6158 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6159 for(pendingRequestIterator i = mPendingRequestsList.begin();
6160 i != mPendingRequestsList.end(); i++) {
6161 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6162 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6163 i->input_buffer);
6164 }
6165 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6166 mPendingBuffersMap.get_num_overall_buffers());
6167 dprintf(fd, "-------+------------------\n");
6168 dprintf(fd, " Frame | Stream type mask \n");
6169 dprintf(fd, "-------+------------------\n");
6170 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6171 for(auto &j : req.mPendingBufferList) {
6172 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6173 dprintf(fd, " %5d | %11d \n",
6174 req.frame_number, channel->getStreamTypeMask());
6175 }
6176 }
6177 dprintf(fd, "-------+------------------\n");
6178
6179 dprintf(fd, "\nPending frame drop list: %zu\n",
6180 mPendingFrameDropList.size());
6181 dprintf(fd, "-------+-----------\n");
6182 dprintf(fd, " Frame | Stream ID \n");
6183 dprintf(fd, "-------+-----------\n");
6184 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6185 i != mPendingFrameDropList.end(); i++) {
6186 dprintf(fd, " %5d | %9d \n",
6187 i->frame_number, i->stream_ID);
6188 }
6189 dprintf(fd, "-------+-----------\n");
6190
6191 dprintf(fd, "\n Camera HAL3 information End \n");
6192
6193 /* use dumpsys media.camera as trigger to send update debug level event */
6194 mUpdateDebugLevel = true;
6195 pthread_mutex_unlock(&mMutex);
6196 return;
6197}
6198
6199/*===========================================================================
6200 * FUNCTION : flush
6201 *
6202 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6203 * conditionally restarts channels
6204 *
6205 * PARAMETERS :
6206 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006207 * @ stopChannelImmediately: stop the channel immediately. This should be used
                6208 *                            when the device has encountered an error and MIPI
                6209 *                            may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006210 *
6211 * RETURN :
6212 * 0 on success
6213 * Error code on failure
6214 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006215int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006216{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006217 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006218 int32_t rc = NO_ERROR;
6219
6220 LOGD("Unblocking Process Capture Request");
6221 pthread_mutex_lock(&mMutex);
6222 mFlush = true;
6223 pthread_mutex_unlock(&mMutex);
6224
6225 rc = stopAllChannels();
6226 // unlink of dualcam
6227 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006228 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6229 &m_pDualCamCmdPtr->bundle_info;
6230 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006231 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6232 pthread_mutex_lock(&gCamLock);
6233
6234 if (mIsMainCamera == 1) {
6235 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6236 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006237 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006238 // related session id should be session id of linked session
6239 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6240 } else {
6241 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6242 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006243 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006244 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6245 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006246 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006247 pthread_mutex_unlock(&gCamLock);
6248
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006249 rc = mCameraHandle->ops->set_dual_cam_cmd(
6250 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 if (rc < 0) {
6252 LOGE("Dualcam: Unlink failed, but still proceed to close");
6253 }
6254 }
6255
6256 if (rc < 0) {
6257 LOGE("stopAllChannels failed");
6258 return rc;
6259 }
6260 if (mChannelHandle) {
6261 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006262 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006263 }
6264
6265 // Reset bundle info
6266 rc = setBundleInfo();
6267 if (rc < 0) {
6268 LOGE("setBundleInfo failed %d", rc);
6269 return rc;
6270 }
6271
6272 // Mutex Lock
6273 pthread_mutex_lock(&mMutex);
6274
6275 // Unblock process_capture_request
6276 mPendingLiveRequest = 0;
6277 pthread_cond_signal(&mRequestCond);
6278
6279 rc = notifyErrorForPendingRequests();
6280 if (rc < 0) {
6281 LOGE("notifyErrorForPendingRequests failed");
6282 pthread_mutex_unlock(&mMutex);
6283 return rc;
6284 }
6285
6286 mFlush = false;
6287
6288 // Start the Streams/Channels
6289 if (restartChannels) {
6290 rc = startAllChannels();
6291 if (rc < 0) {
6292 LOGE("startAllChannels failed");
6293 pthread_mutex_unlock(&mMutex);
6294 return rc;
6295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006296 if (mChannelHandle) {
6297 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006298 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006299 if (rc < 0) {
6300 LOGE("start_channel failed");
6301 pthread_mutex_unlock(&mMutex);
6302 return rc;
6303 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006304 }
6305 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006306 pthread_mutex_unlock(&mMutex);
6307
6308 return 0;
6309}
6310
6311/*===========================================================================
6312 * FUNCTION : flushPerf
6313 *
                6314 * DESCRIPTION: This is the performance-optimized version of flush that does
                6315 *              not use stream off; instead it flushes the backend pipeline
6316 *
6317 * PARAMETERS :
6318 *
6319 *
6320 * RETURN : 0 : success
6321 * -EINVAL: input is malformed (device is not valid)
6322 * -ENODEV: if the device has encountered a serious error
6323 *==========================================================================*/
6324int QCamera3HardwareInterface::flushPerf()
6325{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006326 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006327 int32_t rc = 0;
6328 struct timespec timeout;
6329 bool timed_wait = false;
6330
6331 pthread_mutex_lock(&mMutex);
6332 mFlushPerf = true;
6333 mPendingBuffersMap.numPendingBufsAtFlush =
6334 mPendingBuffersMap.get_num_overall_buffers();
6335 LOGD("Calling flush. Wait for %d buffers to return",
6336 mPendingBuffersMap.numPendingBufsAtFlush);
6337
6338 /* send the flush event to the backend */
6339 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6340 if (rc < 0) {
6341 LOGE("Error in flush: IOCTL failure");
6342 mFlushPerf = false;
6343 pthread_mutex_unlock(&mMutex);
6344 return -ENODEV;
6345 }
6346
6347 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6348 LOGD("No pending buffers in HAL, return flush");
6349 mFlushPerf = false;
6350 pthread_mutex_unlock(&mMutex);
6351 return rc;
6352 }
6353
6354 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006355 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006356 if (rc < 0) {
                6357        LOGE("Error reading the monotonic clock, cannot use timed wait");
6358 } else {
6359 timeout.tv_sec += FLUSH_TIMEOUT;
6360 timed_wait = true;
6361 }
6362
6363 //Block on conditional variable
6364 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6365 LOGD("Waiting on mBuffersCond");
6366 if (!timed_wait) {
6367 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6368 if (rc != 0) {
6369 LOGE("pthread_cond_wait failed due to rc = %s",
6370 strerror(rc));
6371 break;
6372 }
6373 } else {
6374 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6375 if (rc != 0) {
6376 LOGE("pthread_cond_timedwait failed due to rc = %s",
6377 strerror(rc));
6378 break;
6379 }
6380 }
6381 }
6382 if (rc != 0) {
6383 mFlushPerf = false;
6384 pthread_mutex_unlock(&mMutex);
6385 return -ENODEV;
6386 }
6387
6388 LOGD("Received buffers, now safe to return them");
6389
6390 //make sure the channels handle flush
6391 //currently only required for the picture channel to release snapshot resources
6392 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6393 it != mStreamInfo.end(); it++) {
6394 QCamera3Channel *channel = (*it)->channel;
6395 if (channel) {
6396 rc = channel->flush();
6397 if (rc) {
6398 LOGE("Flushing the channels failed with error %d", rc);
                6399                // even though the channel flush failed, we need to continue and
                6400                // return the buffers we have to the framework; however, the return
                6401                // value will be an error
6402 rc = -ENODEV;
6403 }
6404 }
6405 }
6406
6407 /* notify the frameworks and send errored results */
6408 rc = notifyErrorForPendingRequests();
6409 if (rc < 0) {
6410 LOGE("notifyErrorForPendingRequests failed");
6411 pthread_mutex_unlock(&mMutex);
6412 return rc;
6413 }
6414
6415 //unblock process_capture_request
6416 mPendingLiveRequest = 0;
6417 unblockRequestIfNecessary();
6418
6419 mFlushPerf = false;
6420 pthread_mutex_unlock(&mMutex);
6421 LOGD ("Flush Operation complete. rc = %d", rc);
6422 return rc;
6423}
6424
6425/*===========================================================================
6426 * FUNCTION : handleCameraDeviceError
6427 *
6428 * DESCRIPTION: This function calls internal flush and notifies the error to
6429 * framework and updates the state variable.
6430 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006431 * PARAMETERS :
6432 * @stopChannelImmediately : stop channels immediately without waiting for
6433 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006434 *
6435 * RETURN : NO_ERROR on Success
6436 * Error code on failure
6437 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006438int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006439{
6440 int32_t rc = NO_ERROR;
6441
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006442 {
6443 Mutex::Autolock lock(mFlushLock);
6444 pthread_mutex_lock(&mMutex);
6445 if (mState != ERROR) {
6446 //if mState != ERROR, nothing to be done
6447 pthread_mutex_unlock(&mMutex);
6448 return NO_ERROR;
6449 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006450 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006451
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006452 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006453 if (NO_ERROR != rc) {
6454 LOGE("internal flush to handle mState = ERROR failed");
6455 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006456
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006457 pthread_mutex_lock(&mMutex);
6458 mState = DEINIT;
6459 pthread_mutex_unlock(&mMutex);
6460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006461
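                        // Report a device-level error to the framework (no specific stream or
                        // frame number); after CAMERA3_MSG_ERROR_DEVICE the framework is expected
                        // to stop submitting requests and close the device.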
6462 camera3_notify_msg_t notify_msg;
6463 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6464 notify_msg.type = CAMERA3_MSG_ERROR;
6465 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6466 notify_msg.message.error.error_stream = NULL;
6467 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006468 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006469
6470 return rc;
6471}
6472
6473/*===========================================================================
6474 * FUNCTION : captureResultCb
6475 *
6476 * DESCRIPTION: Callback handler for all capture results
6477 * (stream buffers as well as metadata)
6478 *
6479 * PARAMETERS :
6480 * @metadata : metadata information
6481 * @buffer : actual gralloc buffer to be returned to frameworks.
6482 * NULL if metadata.
6483 *
6484 * RETURN : NONE
6485 *==========================================================================*/
6486void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6487 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6488{
6489 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006490 pthread_mutex_lock(&mMutex);
6491 uint8_t batchSize = mBatchSize;
6492 pthread_mutex_unlock(&mMutex);
6493 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006494 handleBatchMetadata(metadata_buf,
6495 true /* free_and_bufdone_meta_buf */);
6496 } else { /* mBatchSize = 0 */
6497 hdrPlusPerfLock(metadata_buf);
6498 pthread_mutex_lock(&mMutex);
6499 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006500 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006501 true /* last urgent frame of batch metadata */,
6502 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006503 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006504 pthread_mutex_unlock(&mMutex);
6505 }
6506 } else if (isInputBuffer) {
6507 pthread_mutex_lock(&mMutex);
6508 handleInputBufferWithLock(frame_number);
6509 pthread_mutex_unlock(&mMutex);
6510 } else {
6511 pthread_mutex_lock(&mMutex);
6512 handleBufferWithLock(buffer, frame_number);
6513 pthread_mutex_unlock(&mMutex);
6514 }
6515 return;
6516}
6517
6518/*===========================================================================
6519 * FUNCTION : getReprocessibleOutputStreamId
6520 *
6521 * DESCRIPTION: Get the source output stream id for the input reprocess
6522 * stream, i.e. the output or bidirectional stream whose size
6523 * and format match the configured input stream.
6524 *
6525 * PARAMETERS :
6526 * @id : return the stream id if found
6527 *
6528 * RETURN : int32_t type of status
6529 * NO_ERROR -- success
6530 * non-zero failure code
6531 *==========================================================================*/
6532int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6533{
6534 /* Check whether any output or bidirectional stream matches the input
6535 stream's size and format, and return that stream */
6536 if ((mInputStreamInfo.dim.width > 0) &&
6537 (mInputStreamInfo.dim.height > 0)) {
6538 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6539 it != mStreamInfo.end(); it++) {
6540
6541 camera3_stream_t *stream = (*it)->stream;
6542 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6543 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6544 (stream->format == mInputStreamInfo.format)) {
6545 // Usage flag for an input stream and the source output stream
6546 // may be different.
6547 LOGD("Found reprocessible output stream! %p", *it);
6548 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6549 stream->usage, mInputStreamInfo.usage);
6550
6551 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6552 if (channel != NULL && channel->mStreams[0]) {
6553 id = channel->mStreams[0]->getMyServerID();
6554 return NO_ERROR;
6555 }
6556 }
6557 }
6558 } else {
6559 LOGD("No input stream, so no reprocessible output stream");
6560 }
6561 return NAME_NOT_FOUND;
6562}
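
/* Illustrative caller sketch (not an excerpt from this HAL; the variable name
 * below is hypothetical):
 *
 *   uint32_t reprocStreamId = 0;
 *   if (getReprocessibleOutputStreamId(reprocStreamId) == NO_ERROR) {
 *       // reprocStreamId now holds the server ID of the output stream whose
 *       // size and format match the configured input stream, i.e. the
 *       // source stream for reprocessing.
 *   }
 */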
6563
6564/*===========================================================================
6565 * FUNCTION : lookupFwkName
6566 *
6567 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6568 * make sure the parameter is correctly propagated
6569 *
6570 * PARAMETERS :
6571 * @arr : map between the two enums
6572 * @len : len of the map
6573 * @hal_name : name of the hal_parm to map
6574 *
6575 * RETURN : int type of status
6576 * fwk_name -- success
6577 * non-zero failure code
6578 *==========================================================================*/
6579template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6580 size_t len, halType hal_name)
6581{
6582
6583 for (size_t i = 0; i < len; i++) {
6584 if (arr[i].hal_name == hal_name) {
6585 return arr[i].fwk_name;
6586 }
6587 }
6588
6589 /* Not being able to find a matching framework type is not necessarily
6590 * an error case. This happens when mm-camera supports more attributes
6591 * than the framework does */
6592 LOGH("Cannot find matching framework type");
6593 return NAME_NOT_FOUND;
6594}
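
/* Usage sketch: this mirrors how the helper is used later in this file for
 * CAM_INTF_PARM_EFFECT (see translateFromHalMetadata); shown here only to
 * illustrate the HAL-to-framework map-lookup pattern.
 *
 *   int val = lookupFwkName(EFFECT_MODES_MAP,
 *           METADATA_MAP_SIZE(EFFECT_MODES_MAP), *effectMode);
 *   if (NAME_NOT_FOUND != val) {
 *       uint8_t fwk_effectMode = (uint8_t)val;
 *       camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
 *   }
 */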
6595
6596/*===========================================================================
6597 * FUNCTION : lookupHalName
6598 *
6599 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6600 * make sure the parameter is correctly propagated
6601 *
6602 * PARAMETERS :
6603 * @arr : map between the two enums
6604 * @len : len of the map
6605 * @fwk_name : name of the framework parameter to map
6606 *
6607 * RETURN : int32_t type of status
6608 * hal_name -- success
6609 * non-zero failure code
6610 *==========================================================================*/
6611template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6612 size_t len, fwkType fwk_name)
6613{
6614 for (size_t i = 0; i < len; i++) {
6615 if (arr[i].fwk_name == fwk_name) {
6616 return arr[i].hal_name;
6617 }
6618 }
6619
6620 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6621 return NAME_NOT_FOUND;
6622}
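
/* Usage sketch for the reverse (framework -> HAL) direction; fwk_effect below
 * is an illustrative framework enum value, not a variable defined in this file.
 *
 *   int hal_effect = lookupHalName(EFFECT_MODES_MAP,
 *           METADATA_MAP_SIZE(EFFECT_MODES_MAP), fwk_effect);
 *   if (NAME_NOT_FOUND != hal_effect) {
 *       // hal_effect can then be written into the backend parameter buffer
 *   }
 */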
6623
6624/*===========================================================================
6625 * FUNCTION : lookupProp
6626 *
6627 * DESCRIPTION: lookup a value by its name
6628 *
6629 * PARAMETERS :
6630 * @arr : map between the two enums
6631 * @len : size of the map
6632 * @name : name to be looked up
6633 *
6634 * RETURN : Value if found
6635 * CAM_CDS_MODE_MAX if not found
6636 *==========================================================================*/
6637template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6638 size_t len, const char *name)
6639{
6640 if (name) {
6641 for (size_t i = 0; i < len; i++) {
6642 if (!strcmp(arr[i].desc, name)) {
6643 return arr[i].val;
6644 }
6645 }
6646 }
6647 return CAM_CDS_MODE_MAX;
6648}
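
/* Usage sketch, assuming a {desc, val} property table; the CDS_MAP name and
 * the property string below are illustrative, not taken from this function.
 *
 *   char prop[PROPERTY_VALUE_MAX];
 *   memset(prop, 0, sizeof(prop));
 *   property_get("persist.camera.CDS", prop, "Auto");
 *   cam_cds_mode_type_t cds_mode =
 *           lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 *   if (CAM_CDS_MODE_MAX == cds_mode) {
 *       // the property string did not match any entry in the table
 *   }
 */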
6649
6650/*===========================================================================
6651 * FUNCTION : translateFromHalMetadata
 *
6652 * DESCRIPTION: Translate the metadata received from the HAL/backend into
 * the camera_metadata_t result format expected by the framework
6653 *
6654 * PARAMETERS :
6655 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006656 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006657 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006658 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6659 * in a batch. Always true for non-batch mode.
 * @enableZsl : pointer to the ZSL enable state to report in the result,
 * NULL when not applicable
Thierry Strudel3d639192016-09-09 11:52:26 -07006660 *
6661 * RETURN : camera_metadata_t*
6662 * metadata in a format specified by fwk
6663 *==========================================================================*/
6664camera_metadata_t*
6665QCamera3HardwareInterface::translateFromHalMetadata(
6666 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006667 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006668 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006669 bool lastMetadataInBatch,
6670 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006671{
6672 CameraMetadata camMetadata;
6673 camera_metadata_t *resultMetadata;
6674
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006675 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006676 /* In batch mode, when this is not the last metadata in the batch, populate
6677 * only SENSOR_TIMESTAMP; the timestamp is needed for the shutter
6678 * notification calculation. */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006679 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006680 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006681 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006682 }
6683
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006684 if (pendingRequest.jpegMetadata.entryCount())
6685 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006686
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006687 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6688 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6689 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6690 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6691 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006692 if (mBatchSize == 0) {
6693 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006694 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006695 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006696
Samuel Ha68ba5172016-12-15 18:41:12 -08006697 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6698 // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006699 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006700 // DevCamDebug metadata translateFromHalMetadata AF
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6702 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6703 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6704 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6707 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6708 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6709 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6712 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6713 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6714 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6715 }
6716 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6717 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6718 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6719 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6722 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6723 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6724 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6727 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6728 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6729 *DevCamDebug_af_monitor_pdaf_target_pos;
6730 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6731 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6734 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6735 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6736 *DevCamDebug_af_monitor_pdaf_confidence;
6737 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6738 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6739 }
6740 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6741 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6742 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6743 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6744 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6745 }
6746 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6747 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6748 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6749 *DevCamDebug_af_monitor_tof_target_pos;
6750 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6751 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6752 }
6753 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6754 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6755 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6756 *DevCamDebug_af_monitor_tof_confidence;
6757 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6758 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6761 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6762 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6763 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6764 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6767 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6768 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6769 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6770 &fwk_DevCamDebug_af_monitor_type_select, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6773 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6774 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6775 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6776 &fwk_DevCamDebug_af_monitor_refocus, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6779 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6780 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6781 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6782 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6783 }
6784 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6785 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6786 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6787 *DevCamDebug_af_search_pdaf_target_pos;
6788 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6789 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6792 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6793 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6794 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6795 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6798 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6799 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6800 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6801 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6802 }
6803 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6804 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6805 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6806 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6807 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6808 }
6809 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6810 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6811 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6812 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6813 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6814 }
6815 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6816 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6817 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6818 *DevCamDebug_af_search_tof_target_pos;
6819 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6820 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6821 }
6822 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6823 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6824 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6825 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6826 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6827 }
6828 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6829 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6830 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6831 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6832 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6833 }
6834 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6835 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6836 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6837 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6838 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6839 }
6840 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6841 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6842 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6843 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6844 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6845 }
6846 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6847 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6848 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6849 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6850 &fwk_DevCamDebug_af_search_type_select, 1);
6851 }
6852 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6853 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6854 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6855 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6856 &fwk_DevCamDebug_af_search_next_pos, 1);
6857 }
6858 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6859 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6860 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6861 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6862 &fwk_DevCamDebug_af_search_target_pos, 1);
6863 }
6864 // DevCamDebug metadata translateFromHalMetadata AEC
6865 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6866 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6867 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6868 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6869 }
6870 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6871 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6872 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6873 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6874 }
6875 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6876 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6877 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6878 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6879 }
6880 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6881 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6882 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6883 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6884 }
6885 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6886 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6887 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6888 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6889 }
6890 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6891 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6892 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6893 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6894 }
6895 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6896 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6897 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6898 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6899 }
6900 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6901 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6902 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6903 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6904 }
Samuel Ha34229982017-02-17 13:51:11 -08006905 // DevCamDebug metadata translateFromHalMetadata zzHDR
6906 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6907 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6908 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6909 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6910 }
6911 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6912 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006913 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006914 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6915 }
6916 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6917 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6918 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6919 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6920 }
6921 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6922 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006923 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006924 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6925 }
6926 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6927 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6928 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6929 *DevCamDebug_aec_hdr_sensitivity_ratio;
6930 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6931 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6932 }
6933 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6934 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6935 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6936 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6937 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6938 }
6939 // DevCamDebug metadata translateFromHalMetadata ADRC
6940 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6941 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6942 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6943 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6944 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6945 }
6946 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6947 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6948 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6949 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6950 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6951 }
6952 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6953 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6954 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6955 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6956 }
6957 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6958 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6959 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6960 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6961 }
6962 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6963 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6964 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6965 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6966 }
6967 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6968 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6969 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6970 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6971 }
Samuel Habdf4fac2017-07-28 17:21:18 -07006972 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
6973 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
6974 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
6975 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
6976 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
6977 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
6978 }
6979 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
6980 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
6981 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
6982 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
6983 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
6984 }
6985 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
6986 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
6987 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
6988 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
6989 &fwk_DevCamDebug_aec_subject_motion, 1);
6990 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006991 // DevCamDebug metadata translateFromHalMetadata AWB
6992 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6993 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6994 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6995 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6996 }
6997 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6998 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6999 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7000 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7001 }
7002 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7003 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7004 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7005 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7006 }
7007 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7008 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7009 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7010 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7011 }
7012 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7013 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7014 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7015 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7016 }
7017 }
7018 // atrace_end(ATRACE_TAG_ALWAYS);
7019
Thierry Strudel3d639192016-09-09 11:52:26 -07007020 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7021 int64_t fwk_frame_number = *frame_number;
7022 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7023 }
7024
7025 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7026 int32_t fps_range[2];
7027 fps_range[0] = (int32_t)float_range->min_fps;
7028 fps_range[1] = (int32_t)float_range->max_fps;
7029 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7030 fps_range, 2);
7031 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7032 fps_range[0], fps_range[1]);
7033 }
7034
7035 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7036 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7037 }
7038
7039 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7040 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7041 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7042 *sceneMode);
7043 if (NAME_NOT_FOUND != val) {
7044 uint8_t fwkSceneMode = (uint8_t)val;
7045 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7046 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7047 fwkSceneMode);
7048 }
7049 }
7050
7051 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7052 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7053 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7054 }
7055
7056 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7057 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7058 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7059 }
7060
7061 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7062 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7063 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7064 }
7065
7066 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7067 CAM_INTF_META_EDGE_MODE, metadata) {
7068 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7069 }
7070
7071 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7072 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7073 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7074 }
7075
7076 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7077 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7078 }
7079
7080 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7081 if (0 <= *flashState) {
7082 uint8_t fwk_flashState = (uint8_t) *flashState;
7083 if (!gCamCapability[mCameraId]->flash_available) {
7084 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7085 }
7086 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7087 }
7088 }
7089
7090 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7091 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7092 if (NAME_NOT_FOUND != val) {
7093 uint8_t fwk_flashMode = (uint8_t)val;
7094 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7095 }
7096 }
7097
7098 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7099 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7100 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7101 }
7102
7103 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7104 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7105 }
7106
7107 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7108 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7109 }
7110
7111 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7112 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7113 }
7114
7115 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7116 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7117 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7118 }
7119
7120 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7121 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7122 LOGD("fwk_videoStab = %d", fwk_videoStab);
7123 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7124 } else {
7125 // Regardless of whether video stabilization is supported or not, CTS expects the EIS
7126 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7127 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7128 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007129 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007130 }
7131
7132 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7133 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7134 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7135 }
7136
7137 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7138 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7139 }
7140
Thierry Strudel3d639192016-09-09 11:52:26 -07007141 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7142 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007143 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007144
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007145 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7146 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007147
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007148 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007149 blackLevelAppliedPattern->cam_black_level[0],
7150 blackLevelAppliedPattern->cam_black_level[1],
7151 blackLevelAppliedPattern->cam_black_level[2],
7152 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007153 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7154 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007155
7156#ifndef USE_HAL_3_3
7157 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307158 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007159 // depth space.
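// i.e. a 4-bit shift, hence the divide by 2^4 = 16 below.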
Jason Lee4f3d96e2017-02-28 19:24:14 +05307160 fwk_blackLevelInd[0] /= 16.0;
7161 fwk_blackLevelInd[1] /= 16.0;
7162 fwk_blackLevelInd[2] /= 16.0;
7163 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007164 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7165 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007166#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007167 }
7168
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007169#ifndef USE_HAL_3_3
7170 // Fixed whitelevel is used by ISP/Sensor
7171 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7172 &gCamCapability[mCameraId]->white_level, 1);
7173#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007174
7175 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7176 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7177 int32_t scalerCropRegion[4];
7178 scalerCropRegion[0] = hScalerCropRegion->left;
7179 scalerCropRegion[1] = hScalerCropRegion->top;
7180 scalerCropRegion[2] = hScalerCropRegion->width;
7181 scalerCropRegion[3] = hScalerCropRegion->height;
7182
7183 // Adjust crop region from sensor output coordinate system to active
7184 // array coordinate system.
7185 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7186 scalerCropRegion[2], scalerCropRegion[3]);
7187
7188 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7189 }
7190
7191 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7192 LOGD("sensorExpTime = %lld", *sensorExpTime);
7193 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7194 }
7195
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007196 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7197 LOGD("expTimeBoost = %f", *expTimeBoost);
7198 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7199 }
7200
Thierry Strudel3d639192016-09-09 11:52:26 -07007201 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7202 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7203 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7204 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7205 }
7206
7207 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7208 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7209 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7210 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7211 sensorRollingShutterSkew, 1);
7212 }
7213
7214 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7215 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7216 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7217
7218 //calculate the noise profile based on sensitivity
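// Per the Android noise model behind ANDROID_SENSOR_NOISE_PROFILE, each color
// channel gets an (S, O) pair where the noise variance of a pixel value x is
// approximately S * x + O; the same pair is reused for every channel below
// because S and O are derived from the sensitivity alone.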
7219 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7220 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7221 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7222 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7223 noise_profile[i] = noise_profile_S;
7224 noise_profile[i+1] = noise_profile_O;
7225 }
7226 LOGD("noise model entry (S, O) is (%f, %f)",
7227 noise_profile_S, noise_profile_O);
7228 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7229 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7230 }
7231
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007232#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007233 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007234 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007235 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007236 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007237 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7238 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7239 }
7240 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007241#endif
7242
Thierry Strudel3d639192016-09-09 11:52:26 -07007243 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7244 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7245 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7246 }
7247
7248 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7249 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7250 *faceDetectMode);
7251 if (NAME_NOT_FOUND != val) {
7252 uint8_t fwk_faceDetectMode = (uint8_t)val;
7253 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7254
7255 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7256 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7257 CAM_INTF_META_FACE_DETECTION, metadata) {
7258 uint8_t numFaces = MIN(
7259 faceDetectionInfo->num_faces_detected, MAX_ROI);
7260 int32_t faceIds[MAX_ROI];
7261 uint8_t faceScores[MAX_ROI];
7262 int32_t faceRectangles[MAX_ROI * 4];
7263 int32_t faceLandmarks[MAX_ROI * 6];
7264 size_t j = 0, k = 0;
7265
7266 for (size_t i = 0; i < numFaces; i++) {
7267 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7268 // Adjust the face boundary from the sensor output coordinate system
7269 // to the active array coordinate system.
7270 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7271 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7272 rect.width, rect.height);
7273
7274 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7275 faceRectangles+j, -1);
7276
Jason Lee8ce36fa2017-04-19 19:40:37 -07007277 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7278 "bottom-right (%d, %d)",
7279 faceDetectionInfo->frame_id, i,
7280 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7281 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7282
Thierry Strudel3d639192016-09-09 11:52:26 -07007283 j+= 4;
7284 }
7285 if (numFaces <= 0) {
7286 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7287 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7288 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7289 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7290 }
7291
7292 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7293 numFaces);
7294 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7295 faceRectangles, numFaces * 4U);
7296 if (fwk_faceDetectMode ==
7297 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7298 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7299 CAM_INTF_META_FACE_LANDMARK, metadata) {
7300
7301 for (size_t i = 0; i < numFaces; i++) {
7302 // Map the landmark coordinates from the sensor output coordinate
7303 // system to the active array coordinate system.
7304 mCropRegionMapper.toActiveArray(
7305 landmarks->face_landmarks[i].left_eye_center.x,
7306 landmarks->face_landmarks[i].left_eye_center.y);
7307 mCropRegionMapper.toActiveArray(
7308 landmarks->face_landmarks[i].right_eye_center.x,
7309 landmarks->face_landmarks[i].right_eye_center.y);
7310 mCropRegionMapper.toActiveArray(
7311 landmarks->face_landmarks[i].mouth_center.x,
7312 landmarks->face_landmarks[i].mouth_center.y);
7313
7314 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007315
7316 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7317 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7318 faceDetectionInfo->frame_id, i,
7319 faceLandmarks[k + LEFT_EYE_X],
7320 faceLandmarks[k + LEFT_EYE_Y],
7321 faceLandmarks[k + RIGHT_EYE_X],
7322 faceLandmarks[k + RIGHT_EYE_Y],
7323 faceLandmarks[k + MOUTH_X],
7324 faceLandmarks[k + MOUTH_Y]);
7325
Thierry Strudel04e026f2016-10-10 11:27:36 -07007326 k+= TOTAL_LANDMARK_INDICES;
7327 }
7328 } else {
7329 for (size_t i = 0; i < numFaces; i++) {
7330 setInvalidLandmarks(faceLandmarks+k);
7331 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007332 }
7333 }
7334
Jason Lee49619db2017-04-13 12:07:22 -07007335 for (size_t i = 0; i < numFaces; i++) {
7336 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7337
7338 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7339 faceDetectionInfo->frame_id, i, faceIds[i]);
7340 }
7341
Thierry Strudel3d639192016-09-09 11:52:26 -07007342 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7343 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7344 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007345 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007346 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7347 CAM_INTF_META_FACE_BLINK, metadata) {
7348 uint8_t detected[MAX_ROI];
7349 uint8_t degree[MAX_ROI * 2];
7350 for (size_t i = 0; i < numFaces; i++) {
7351 detected[i] = blinks->blink[i].blink_detected;
7352 degree[2 * i] = blinks->blink[i].left_blink;
7353 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007354
Jason Lee49619db2017-04-13 12:07:22 -07007355 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7356 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7357 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7358 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007359 }
7360 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7361 detected, numFaces);
7362 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7363 degree, numFaces * 2);
7364 }
7365 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7366 CAM_INTF_META_FACE_SMILE, metadata) {
7367 uint8_t degree[MAX_ROI];
7368 uint8_t confidence[MAX_ROI];
7369 for (size_t i = 0; i < numFaces; i++) {
7370 degree[i] = smiles->smile[i].smile_degree;
7371 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007372
Jason Lee49619db2017-04-13 12:07:22 -07007373 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7374 "smile_degree=%d, smile_score=%d",
7375 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007376 }
7377 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7378 degree, numFaces);
7379 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7380 confidence, numFaces);
7381 }
7382 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7383 CAM_INTF_META_FACE_GAZE, metadata) {
7384 int8_t angle[MAX_ROI];
7385 int32_t direction[MAX_ROI * 3];
7386 int8_t degree[MAX_ROI * 2];
7387 for (size_t i = 0; i < numFaces; i++) {
7388 angle[i] = gazes->gaze[i].gaze_angle;
7389 direction[3 * i] = gazes->gaze[i].updown_dir;
7390 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7391 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7392 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7393 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007394
7395 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7396 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7397 "left_right_gaze=%d, top_bottom_gaze=%d",
7398 faceDetectionInfo->frame_id, i, angle[i],
7399 direction[3 * i], direction[3 * i + 1],
7400 direction[3 * i + 2],
7401 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007402 }
7403 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7404 (uint8_t *)angle, numFaces);
7405 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7406 direction, numFaces * 3);
7407 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7408 (uint8_t *)degree, numFaces * 2);
7409 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007410 }
7411 }
7412 }
7413 }
7414
7415 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7416 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007417 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007418 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007419 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007420
Shuzhen Wang14415f52016-11-16 18:26:18 -08007421 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7422 histogramBins = *histBins;
7423 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7424 }
7425
7426 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007427 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7428 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007429 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007430
7431 switch (stats_data->type) {
7432 case CAM_HISTOGRAM_TYPE_BAYER:
7433 switch (stats_data->bayer_stats.data_type) {
7434 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007435 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7436 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007437 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007438 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7439 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007440 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007441 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7442 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007443 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007444 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007445 case CAM_STATS_CHANNEL_R:
7446 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007447 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7448 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007449 }
7450 break;
7451 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007452 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007453 break;
7454 }
7455
Shuzhen Wang14415f52016-11-16 18:26:18 -08007456 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007457 }
7458 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007459 }
7460
7461 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7462 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7463 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7464 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7465 }
7466
7467 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7468 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7469 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7470 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7471 }
7472
7473 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7474 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7475 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7476 CAM_MAX_SHADING_MAP_HEIGHT);
7477 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7478 CAM_MAX_SHADING_MAP_WIDTH);
7479 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7480 lensShadingMap->lens_shading, 4U * map_width * map_height);
7481 }
7482
7483 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7484 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7485 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7486 }
7487
7488 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7489 //Populate CAM_INTF_META_TONEMAP_CURVES
7490 /* ch0 = G, ch 1 = B, ch 2 = R*/
7491 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7492 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7493 tonemap->tonemap_points_cnt,
7494 CAM_MAX_TONEMAP_CURVE_SIZE);
7495 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7496 }
7497
7498 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7499 &tonemap->curves[0].tonemap_points[0][0],
7500 tonemap->tonemap_points_cnt * 2);
7501
7502 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7503 &tonemap->curves[1].tonemap_points[0][0],
7504 tonemap->tonemap_points_cnt * 2);
7505
7506 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7507 &tonemap->curves[2].tonemap_points[0][0],
7508 tonemap->tonemap_points_cnt * 2);
7509 }
7510
7511 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7512 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7513 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7514 CC_GAIN_MAX);
7515 }
7516
7517 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7518 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7519 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7520 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7521 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7522 }
7523
7524 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7525 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7526 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7527 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7528 toneCurve->tonemap_points_cnt,
7529 CAM_MAX_TONEMAP_CURVE_SIZE);
7530 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7531 }
7532 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7533 (float*)toneCurve->curve.tonemap_points,
7534 toneCurve->tonemap_points_cnt * 2);
7535 }
7536
7537 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7538 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7539 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7540 predColorCorrectionGains->gains, 4);
7541 }
7542
7543 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7544 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7545 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7546 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7547 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7548 }
7549
7550 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7551 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7552 }
7553
7554 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7555 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7556 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7557 }
7558
7559 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7560 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7561 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7562 }
7563
7564 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7565 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7566 *effectMode);
7567 if (NAME_NOT_FOUND != val) {
7568 uint8_t fwk_effectMode = (uint8_t)val;
7569 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7570 }
7571 }
7572
7573 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7574 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7575 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7576 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7577 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7578 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7579 }
7580 int32_t fwk_testPatternData[4];
7581 fwk_testPatternData[0] = testPatternData->r;
7582 fwk_testPatternData[3] = testPatternData->b;
7583 switch (gCamCapability[mCameraId]->color_arrangement) {
7584 case CAM_FILTER_ARRANGEMENT_RGGB:
7585 case CAM_FILTER_ARRANGEMENT_GRBG:
7586 fwk_testPatternData[1] = testPatternData->gr;
7587 fwk_testPatternData[2] = testPatternData->gb;
7588 break;
7589 case CAM_FILTER_ARRANGEMENT_GBRG:
7590 case CAM_FILTER_ARRANGEMENT_BGGR:
7591 fwk_testPatternData[2] = testPatternData->gr;
7592 fwk_testPatternData[1] = testPatternData->gb;
7593 break;
7594 default:
7595 LOGE("color arrangement %d is not supported",
7596 gCamCapability[mCameraId]->color_arrangement);
7597 break;
7598 }
7599 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7600 }
7601
7602 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7603 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7604 }
7605
7606 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7607 String8 str((const char *)gps_methods);
7608 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7609 }
7610
7611 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7612 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7613 }
7614
7615 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7616 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7617 }
7618
7619 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7620 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7621 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7622 }
7623
7624 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7625 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7626 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7627 }
7628
7629 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7630 int32_t fwk_thumb_size[2];
7631 fwk_thumb_size[0] = thumb_size->width;
7632 fwk_thumb_size[1] = thumb_size->height;
7633 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7634 }
7635
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007636 // Skip reprocess metadata if there is no input stream.
7637 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7638 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7639 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7640 privateData,
7641 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007643 }
7644
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007645 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007646 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007647 meteringMode, 1);
7648 }
7649
Thierry Strudel54dc9782017-02-15 12:12:10 -08007650 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7651 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7652 LOGD("hdr_scene_data: %d %f\n",
7653 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7654 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7655 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7656 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7657 &isHdr, 1);
7658 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7659 &isHdrConfidence, 1);
7660 }
7661
7662
7663
Thierry Strudel3d639192016-09-09 11:52:26 -07007664 if (metadata->is_tuning_params_valid) {
7665 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7666 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7667 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7668
7669
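        // Pack the tuning blob: a 32-bit version word and five 32-bit section sizes
        // (sensor, VFE, CPP, CAC, mod3), followed by the sensor, VFE, CPP and CAC
        // payloads, each clamped to its respective maximum size.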
7670 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7671 sizeof(uint32_t));
7672 data += sizeof(uint32_t);
7673
7674 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7675 sizeof(uint32_t));
7676 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7677 data += sizeof(uint32_t);
7678
7679 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7680 sizeof(uint32_t));
7681 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7682 data += sizeof(uint32_t);
7683
7684 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7685 sizeof(uint32_t));
7686 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7687 data += sizeof(uint32_t);
7688
7689 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7690 sizeof(uint32_t));
7691 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7692 data += sizeof(uint32_t);
7693
7694 metadata->tuning_params.tuning_mod3_data_size = 0;
7695 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7696 sizeof(uint32_t));
7697 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7698 data += sizeof(uint32_t);
7699
7700 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7701 TUNING_SENSOR_DATA_MAX);
7702 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7703 count);
7704 data += count;
7705
7706 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7707 TUNING_VFE_DATA_MAX);
7708 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7709 count);
7710 data += count;
7711
7712 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7713 TUNING_CPP_DATA_MAX);
7714 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7715 count);
7716 data += count;
7717
7718 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7719 TUNING_CAC_DATA_MAX);
7720 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7721 count);
7722 data += count;
7723
7724 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7725 (int32_t *)(void *)tuning_meta_data_blob,
7726 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7727 }
7728
7729 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7730 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7731 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7732 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7733 NEUTRAL_COL_POINTS);
7734 }
7735
7736 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7737 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7738 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7739 }
7740
7741 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7742 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7743        // Adjust the AE region from the sensor output coordinate system to the
7744        // active array coordinate system.
7745 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7746 hAeRegions->rect.width, hAeRegions->rect.height);
7747
7748 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7749 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7750 REGIONS_TUPLE_COUNT);
7751 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7752 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7753 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7754 hAeRegions->rect.height);
7755 }
7756
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007757 if (!pendingRequest.focusStateSent) {
7758 if (pendingRequest.focusStateValid) {
7759 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7760 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007761 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007762 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7763 uint8_t fwk_afState = (uint8_t) *afState;
7764 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7765 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7766 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007767 }
7768 }
7769
Thierry Strudel3d639192016-09-09 11:52:26 -07007770 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7771 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7772 }
7773
7774 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7775 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7776 }
7777
7778 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7779 uint8_t fwk_lensState = *lensState;
7780 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7781 }
7782
Thierry Strudel3d639192016-09-09 11:52:26 -07007783 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007784 uint32_t ab_mode = *hal_ab_mode;
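        // Collapse the HAL's AUTO_50HZ/AUTO_60HZ variants to plain AUTO before mapping
        // to the framework antibanding enum, which has no frequency-specific auto modes.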
7785 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7786 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7787 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7788 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007789 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007790 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007791 if (NAME_NOT_FOUND != val) {
7792 uint8_t fwk_ab_mode = (uint8_t)val;
7793 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7794 }
7795 }
7796
7797 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7798 int val = lookupFwkName(SCENE_MODES_MAP,
7799 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7800 if (NAME_NOT_FOUND != val) {
7801 uint8_t fwkBestshotMode = (uint8_t)val;
7802 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7803 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7804 } else {
7805 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7806 }
7807 }
7808
7809 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7810 uint8_t fwk_mode = (uint8_t) *mode;
7811 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7812 }
7813
7814    /* Constant metadata values to be updated */
7815 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7816 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7817
7818 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7819 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7820
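    // Hot pixel map mode is reported as OFF, so publish an empty (zero-count) hot pixel map.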
7821 int32_t hotPixelMap[2];
7822 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7823
7824 // CDS
7825 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7826 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7827 }
7828
Thierry Strudel04e026f2016-10-10 11:27:36 -07007829 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7830 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007831 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007832 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7833 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7834 } else {
7835 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7836 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007837
7838 if(fwk_hdr != curr_hdr_state) {
7839 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7840 if(fwk_hdr)
7841 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7842 else
7843 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7844 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007845 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7846 }
7847
Thierry Strudel54dc9782017-02-15 12:12:10 -08007848 //binning correction
7849 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7850 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7851 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7852 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7853 }
7854
Thierry Strudel04e026f2016-10-10 11:27:36 -07007855 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007856 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007857 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7858 int8_t is_ir_on = 0;
7859
7860        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7861 if(is_ir_on != curr_ir_state) {
7862 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7863 if(is_ir_on)
7864 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7865 else
7866 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7867 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007868 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007869 }
7870
Thierry Strudel269c81a2016-10-12 12:13:59 -07007871 // AEC SPEED
7872 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7873 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7874 }
7875
7876 // AWB SPEED
7877 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7878 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7879 }
7880
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 // TNR
7882 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7883 uint8_t tnr_enable = tnr->denoise_enable;
7884 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007885 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7886 int8_t is_tnr_on = 0;
7887
7888        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7889 if(is_tnr_on != curr_tnr_state) {
7890 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7891 if(is_tnr_on)
7892 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7893 else
7894 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007896
7897 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7898 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7899 }
7900
7901 // Reprocess crop data
7902 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7903 uint8_t cnt = crop_data->num_of_streams;
7904 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7905            // mm-qcamera-daemon only posts crop_data for streams
7906            // not linked to pproc, so the absence of valid crop metadata
7907            // is not necessarily an error case.
7908 LOGD("No valid crop metadata entries");
7909 } else {
7910 uint32_t reproc_stream_id;
7911 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7912 LOGD("No reprocessible stream found, ignore crop data");
7913 } else {
7914 int rc = NO_ERROR;
7915 Vector<int32_t> roi_map;
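                // Allocate worst-case space (4 values per stream), though only the crop
                // of the reprocessible stream found below is reported.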
7916 int32_t *crop = new int32_t[cnt*4];
7917 if (NULL == crop) {
7918 rc = NO_MEMORY;
7919 }
7920 if (NO_ERROR == rc) {
7921 int32_t streams_found = 0;
7922 for (size_t i = 0; i < cnt; i++) {
7923 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7924 if (pprocDone) {
7925 // HAL already does internal reprocessing,
7926 // either via reprocessing before JPEG encoding,
7927 // or offline postprocessing for pproc bypass case.
7928 crop[0] = 0;
7929 crop[1] = 0;
7930 crop[2] = mInputStreamInfo.dim.width;
7931 crop[3] = mInputStreamInfo.dim.height;
7932 } else {
7933 crop[0] = crop_data->crop_info[i].crop.left;
7934 crop[1] = crop_data->crop_info[i].crop.top;
7935 crop[2] = crop_data->crop_info[i].crop.width;
7936 crop[3] = crop_data->crop_info[i].crop.height;
7937 }
7938 roi_map.add(crop_data->crop_info[i].roi_map.left);
7939 roi_map.add(crop_data->crop_info[i].roi_map.top);
7940 roi_map.add(crop_data->crop_info[i].roi_map.width);
7941 roi_map.add(crop_data->crop_info[i].roi_map.height);
7942 streams_found++;
7943 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7944 crop[0], crop[1], crop[2], crop[3]);
7945 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7946 crop_data->crop_info[i].roi_map.left,
7947 crop_data->crop_info[i].roi_map.top,
7948 crop_data->crop_info[i].roi_map.width,
7949 crop_data->crop_info[i].roi_map.height);
7950 break;
7951
7952 }
7953 }
7954 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7955 &streams_found, 1);
7956 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7957 crop, (size_t)(streams_found * 4));
7958 if (roi_map.array()) {
7959 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7960 roi_map.array(), roi_map.size());
7961 }
7962 }
7963 if (crop) {
7964 delete [] crop;
7965 }
7966 }
7967 }
7968 }
7969
7970 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7971        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7972        // non-NULL, so hardcode the CAC result to OFF mode.
7973 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7974 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7975 } else {
7976 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7977 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7978 *cacMode);
7979 if (NAME_NOT_FOUND != val) {
7980 uint8_t resultCacMode = (uint8_t)val;
7981                // Check whether the CAC result from the callback matches the framework-set
7982                // CAC mode. If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007983 if (pendingRequest.fwkCacMode != resultCacMode) {
7984 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07007985 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007986 //Check if CAC is disabled by property
7987 if (m_cacModeDisabled) {
7988 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7989 }
7990
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007991 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007992 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7993 } else {
7994 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7995 }
7996 }
7997 }
7998
7999 // Post blob of cam_cds_data through vendor tag.
8000 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8001 uint8_t cnt = cdsInfo->num_of_streams;
8002 cam_cds_data_t cdsDataOverride;
8003 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8004 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8005 cdsDataOverride.num_of_streams = 1;
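        // Only the CDS enable flag of the reprocessible stream (looked up below) is
        // copied into the single-entry override blob posted to the framework.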
8006 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8007 uint32_t reproc_stream_id;
8008 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8009 LOGD("No reprocessible stream found, ignore cds data");
8010 } else {
8011 for (size_t i = 0; i < cnt; i++) {
8012 if (cdsInfo->cds_info[i].stream_id ==
8013 reproc_stream_id) {
8014 cdsDataOverride.cds_info[0].cds_enable =
8015 cdsInfo->cds_info[i].cds_enable;
8016 break;
8017 }
8018 }
8019 }
8020 } else {
8021 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8022 }
8023 camMetadata.update(QCAMERA3_CDS_INFO,
8024 (uint8_t *)&cdsDataOverride,
8025 sizeof(cam_cds_data_t));
8026 }
8027
8028 // Ldaf calibration data
8029 if (!mLdafCalibExist) {
8030 IF_META_AVAILABLE(uint32_t, ldafCalib,
8031 CAM_INTF_META_LDAF_EXIF, metadata) {
8032 mLdafCalibExist = true;
8033 mLdafCalib[0] = ldafCalib[0];
8034 mLdafCalib[1] = ldafCalib[1];
8035 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8036 ldafCalib[0], ldafCalib[1]);
8037 }
8038 }
8039
Thierry Strudel54dc9782017-02-15 12:12:10 -08008040 // EXIF debug data through vendor tag
8041 /*
8042 * Mobicat Mask can assume 3 values:
8043 * 1 refers to Mobicat data,
8044 * 2 refers to Stats Debug and Exif Debug Data
8045 * 3 refers to Mobicat and Stats Debug Data
8046 * We want to make sure that we are sending Exif debug data
8047 * only when Mobicat Mask is 2.
8048 */
8049 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8050 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8051 (uint8_t *)(void *)mExifParams.debug_params,
8052 sizeof(mm_jpeg_debug_exif_params_t));
8053 }
8054
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008055 // Reprocess and DDM debug data through vendor tag
8056 cam_reprocess_info_t repro_info;
8057 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008058 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8059 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008060 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008061 }
8062 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8063 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008064 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008065 }
8066 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8067 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008068 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008069 }
8070 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8071 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008072 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008073 }
8074 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8075 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008076 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
8078 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008079 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008080 }
8081 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8082 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008083 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008085 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8086 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8087 }
8088 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8089 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8090 }
8091 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8092 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008093
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008094 // INSTANT AEC MODE
8095 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8096 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8097 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8098 }
8099
Shuzhen Wange763e802016-03-31 10:24:29 -07008100 // AF scene change
8101 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8102 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8103 }
8104
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008105 // Enable ZSL
8106 if (enableZsl != nullptr) {
8107 uint8_t value = *enableZsl ?
8108 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8109 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8110 }
8111
Xu Han821ea9c2017-05-23 09:00:40 -07008112 // OIS Data
8113 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8114 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8115 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8116 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8117 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8118 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8119 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8120 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8121 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8122 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8123 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8124 }
8125
Thierry Strudel3d639192016-09-09 11:52:26 -07008126 resultMetadata = camMetadata.release();
8127 return resultMetadata;
8128}
8129
8130/*===========================================================================
8131 * FUNCTION : saveExifParams
8132 *
8133 * DESCRIPTION: Save EXIF debug parameters from the metadata callback into mExifParams
8134 *
8135 * PARAMETERS :
8136 * @metadata : metadata information from callback
8137 *
8138 * RETURN : none
8139 *
8140 *==========================================================================*/
8141void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8142{
8143 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8144 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8145 if (mExifParams.debug_params) {
8146 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8147 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8148 }
8149 }
8150 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8151 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8152 if (mExifParams.debug_params) {
8153 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8154 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8155 }
8156 }
8157 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8158 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8159 if (mExifParams.debug_params) {
8160 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8161 mExifParams.debug_params->af_debug_params_valid = TRUE;
8162 }
8163 }
8164 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8165 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8166 if (mExifParams.debug_params) {
8167 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8168 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8169 }
8170 }
8171 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8172 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8173 if (mExifParams.debug_params) {
8174 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8175 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8176 }
8177 }
8178 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8179 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8180 if (mExifParams.debug_params) {
8181 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8182 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8183 }
8184 }
8185 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8186 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8187 if (mExifParams.debug_params) {
8188 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8189 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8190 }
8191 }
8192 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8193 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8194 if (mExifParams.debug_params) {
8195 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8196 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8197 }
8198 }
8199}
8200
8201/*===========================================================================
8202 * FUNCTION : get3AExifParams
8203 *
8204 * DESCRIPTION: Return the cached 3A EXIF parameters
8205 *
8206 * PARAMETERS : none
8207 *
8208 *
8209 * RETURN : mm_jpeg_exif_params_t
8210 *
8211 *==========================================================================*/
8212mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8213{
8214 return mExifParams;
8215}
8216
8217/*===========================================================================
8218 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8219 *
8220 * DESCRIPTION:
8221 *
8222 * PARAMETERS :
8223 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008224 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8225 * urgent metadata in a batch. Always true for
8226 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008227 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008228 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8229 * i.e. even though it doesn't map to a valid partial
8230 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008231 * RETURN : camera_metadata_t*
8232 * metadata in a format specified by fwk
8233 *==========================================================================*/
8234camera_metadata_t*
8235QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008236 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008237 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008238{
8239 CameraMetadata camMetadata;
8240 camera_metadata_t *resultMetadata;
8241
Shuzhen Wang485e2442017-08-02 12:21:08 -07008242 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008243 /* In batch mode, use empty metadata if this is not the last in batch
8244 */
8245 resultMetadata = allocate_camera_metadata(0, 0);
8246 return resultMetadata;
8247 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008248
8249 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8250 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8251 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8252 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8253 }
8254
8255 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8256 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8257 &aecTrigger->trigger, 1);
8258 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8259 &aecTrigger->trigger_id, 1);
8260 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8261 aecTrigger->trigger);
8262 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8263 aecTrigger->trigger_id);
8264 }
8265
8266 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8267 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8268 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8269 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8270 }
8271
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008272 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8273 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8274 if (NAME_NOT_FOUND != val) {
8275 uint8_t fwkAfMode = (uint8_t)val;
8276 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8277 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8278 } else {
8279 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8280 val);
8281 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008282 }
8283
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008284 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8285 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8286 af_trigger->trigger);
8287 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8288 af_trigger->trigger_id);
8289
8290 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8291 mAfTrigger = *af_trigger;
8292 uint32_t fwk_AfState = (uint32_t) *afState;
8293
8294 // If this is the result for a new trigger, check if there is new early
8295 // af state. If there is, use the last af state for all results
8296 // preceding current partial frame number.
8297 for (auto & pendingRequest : mPendingRequestsList) {
8298 if (pendingRequest.frame_number < frame_number) {
8299 pendingRequest.focusStateValid = true;
8300 pendingRequest.focusState = fwk_AfState;
8301 } else if (pendingRequest.frame_number == frame_number) {
8302 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8303 // Check if early AF state for trigger exists. If yes, send AF state as
8304 // partial result for better latency.
8305 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8306 pendingRequest.focusStateSent = true;
8307 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8308 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8309 frame_number, fwkEarlyAfState);
8310 }
8311 }
8312 }
8313 }
8314 }
8315 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8316 &mAfTrigger.trigger, 1);
8317 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8318
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008319 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8320 /*af regions*/
8321 int32_t afRegions[REGIONS_TUPLE_COUNT];
8322        // Adjust the AF region from the sensor output coordinate system to the
8323        // active array coordinate system.
8324 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8325 hAfRegions->rect.width, hAfRegions->rect.height);
8326
8327 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8328 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8329 REGIONS_TUPLE_COUNT);
8330 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8331 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8332 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8333 hAfRegions->rect.height);
8334 }
8335
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008336 // AF region confidence
8337 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8338 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8339 }
8340
Thierry Strudel3d639192016-09-09 11:52:26 -07008341 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8342 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8343 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8344 if (NAME_NOT_FOUND != val) {
8345 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8346 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8347 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8348 } else {
8349 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8350 }
8351 }
8352
8353 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8354 uint32_t aeMode = CAM_AE_MODE_MAX;
8355 int32_t flashMode = CAM_FLASH_MODE_MAX;
8356 int32_t redeye = -1;
8357 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8358 aeMode = *pAeMode;
8359 }
8360 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8361 flashMode = *pFlashMode;
8362 }
8363 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8364 redeye = *pRedeye;
8365 }
8366
8367 if (1 == redeye) {
8368 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8369 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8370 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8371 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8372 flashMode);
8373 if (NAME_NOT_FOUND != val) {
8374 fwk_aeMode = (uint8_t)val;
8375 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8376 } else {
8377 LOGE("Unsupported flash mode %d", flashMode);
8378 }
8379 } else if (aeMode == CAM_AE_MODE_ON) {
8380 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8381 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8382 } else if (aeMode == CAM_AE_MODE_OFF) {
8383 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8384 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008385 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8386 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8387 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008388 } else {
8389 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8390 "flashMode:%d, aeMode:%u!!!",
8391 redeye, flashMode, aeMode);
8392 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008393 if (mInstantAEC) {
8394        // Increment the frame index count until the instant AEC bound is reached.
8395 mInstantAecFrameIdxCount++;
8396 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8397 CAM_INTF_META_AEC_INFO, metadata) {
8398 LOGH("ae_params->settled = %d",ae_params->settled);
8399            // If AEC has settled, or the number of frames has reached the bound,
8400            // reset instant AEC.
8401 if (ae_params->settled ||
8402 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8403 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8404 mInstantAEC = false;
8405 mResetInstantAEC = true;
8406 mInstantAecFrameIdxCount = 0;
8407 }
8408 }
8409 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008410 resultMetadata = camMetadata.release();
8411 return resultMetadata;
8412}
8413
8414/*===========================================================================
8415 * FUNCTION : dumpMetadataToFile
8416 *
8417 * DESCRIPTION: Dumps tuning metadata to file system
8418 *
8419 * PARAMETERS :
8420 * @meta : tuning metadata
8421 * @dumpFrameCount : current dump frame count
8422 * @enabled : Enable mask
8423 * @type : metadata type string used in the dump file name
 * @frameNumber : frame number being dumped
 *
8424 *==========================================================================*/
8425void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8426 uint32_t &dumpFrameCount,
8427 bool enabled,
8428 const char *type,
8429 uint32_t frameNumber)
8430{
8431 //Some sanity checks
8432 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8433 LOGE("Tuning sensor data size bigger than expected %d: %d",
8434 meta.tuning_sensor_data_size,
8435 TUNING_SENSOR_DATA_MAX);
8436 return;
8437 }
8438
8439 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8440 LOGE("Tuning VFE data size bigger than expected %d: %d",
8441 meta.tuning_vfe_data_size,
8442 TUNING_VFE_DATA_MAX);
8443 return;
8444 }
8445
8446 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8447 LOGE("Tuning CPP data size bigger than expected %d: %d",
8448 meta.tuning_cpp_data_size,
8449 TUNING_CPP_DATA_MAX);
8450 return;
8451 }
8452
8453 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8454 LOGE("Tuning CAC data size bigger than expected %d: %d",
8455 meta.tuning_cac_data_size,
8456 TUNING_CAC_DATA_MAX);
8457 return;
8458 }
8459 //
8460
8461 if(enabled){
8462 char timeBuf[FILENAME_MAX];
8463 char buf[FILENAME_MAX];
8464 memset(buf, 0, sizeof(buf));
8465 memset(timeBuf, 0, sizeof(timeBuf));
8466 time_t current_time;
8467 struct tm * timeinfo;
8468 time (&current_time);
8469 timeinfo = localtime (&current_time);
8470 if (timeinfo != NULL) {
8471 strftime (timeBuf, sizeof(timeBuf),
8472 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8473 }
8474 String8 filePath(timeBuf);
8475 snprintf(buf,
8476 sizeof(buf),
8477 "%dm_%s_%d.bin",
8478 dumpFrameCount,
8479 type,
8480 frameNumber);
8481 filePath.append(buf);
8482 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8483 if (file_fd >= 0) {
8484 ssize_t written_len = 0;
8485 meta.tuning_data_version = TUNING_DATA_VERSION;
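            // The dump file mirrors the in-memory tuning blob layout: six 32-bit header
            // fields (version plus five section sizes) followed by the sensor, VFE, CPP
            // and CAC payloads.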
8486 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8487 written_len += write(file_fd, data, sizeof(uint32_t));
8488 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8489 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8490 written_len += write(file_fd, data, sizeof(uint32_t));
8491 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8492 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8493 written_len += write(file_fd, data, sizeof(uint32_t));
8494 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8495 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8496 written_len += write(file_fd, data, sizeof(uint32_t));
8497 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8498 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8499 written_len += write(file_fd, data, sizeof(uint32_t));
8500 meta.tuning_mod3_data_size = 0;
8501 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8502 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8503 written_len += write(file_fd, data, sizeof(uint32_t));
8504 size_t total_size = meta.tuning_sensor_data_size;
8505 data = (void *)((uint8_t *)&meta.data);
8506 written_len += write(file_fd, data, total_size);
8507 total_size = meta.tuning_vfe_data_size;
8508 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8509 written_len += write(file_fd, data, total_size);
8510 total_size = meta.tuning_cpp_data_size;
8511 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8512 written_len += write(file_fd, data, total_size);
8513 total_size = meta.tuning_cac_data_size;
8514 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8515 written_len += write(file_fd, data, total_size);
8516 close(file_fd);
8517        } else {
8518            LOGE("failed to open file for metadata dumping");
8519 }
8520 }
8521}
8522
8523/*===========================================================================
8524 * FUNCTION : cleanAndSortStreamInfo
8525 *
8526 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8527 * and sort them such that raw stream is at the end of the list
8528 * and sort them such that the raw stream is at the end of the list.
8529 * This is a workaround for a camera daemon constraint.
8530 * PARAMETERS : None
8531 *
8532 *==========================================================================*/
8533void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8534{
8535 List<stream_info_t *> newStreamInfo;
8536
8537 /*clean up invalid streams*/
8538 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8539 it != mStreamInfo.end();) {
8540 if(((*it)->status) == INVALID){
8541 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8542 delete channel;
8543 free(*it);
8544 it = mStreamInfo.erase(it);
8545 } else {
8546 it++;
8547 }
8548 }
8549
8550 // Move preview/video/callback/snapshot streams into newList
8551 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8552 it != mStreamInfo.end();) {
8553 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8554 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8555 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8556 newStreamInfo.push_back(*it);
8557 it = mStreamInfo.erase(it);
8558 } else
8559 it++;
8560 }
8561 // Move raw streams into newList
8562 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8563 it != mStreamInfo.end();) {
8564 newStreamInfo.push_back(*it);
8565 it = mStreamInfo.erase(it);
8566 }
8567
8568 mStreamInfo = newStreamInfo;
8569}
8570
8571/*===========================================================================
8572 * FUNCTION : extractJpegMetadata
8573 *
8574 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8575 * JPEG metadata is cached in HAL, and return as part of capture
8576 * JPEG metadata is cached in HAL, and returned as part of the capture
8577 *
8578 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8579 * @request: capture request
8580 *
8581 *==========================================================================*/
8582void QCamera3HardwareInterface::extractJpegMetadata(
8583 CameraMetadata& jpegMetadata,
8584 const camera3_capture_request_t *request)
8585{
8586 CameraMetadata frame_settings;
8587 frame_settings = request->settings;
8588
8589 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8590 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8591 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8592 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8593
8594 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8595 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8596 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8597 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8598
8599 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8600 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8601 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8602 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8603
8604 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8605 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8606 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8607 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8608
8609 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8610 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8611 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8612 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8613
8614 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8615 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8616 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8617 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8618
8619 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8620 int32_t thumbnail_size[2];
8621 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8622 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8623 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8624 int32_t orientation =
8625 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008626 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008627 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8628 int32_t temp;
8629 temp = thumbnail_size[0];
8630 thumbnail_size[0] = thumbnail_size[1];
8631 thumbnail_size[1] = temp;
8632 }
8633 }
8634 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8635 thumbnail_size,
8636 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8637 }
8638
8639}
8640
8641/*===========================================================================
8642 * FUNCTION : convertToRegions
8643 *
8644 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8645 *
8646 * PARAMETERS :
8647 * @rect : cam_rect_t struct to convert
8648 * @region : int32_t destination array
8649 * @weight : if we are converting from cam_area_t, weight is valid
8650 * else weight = -1
8651 *
8652 *==========================================================================*/
8653void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8654 int32_t *region, int weight)
8655{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008656 region[FACE_LEFT] = rect.left;
8657 region[FACE_TOP] = rect.top;
8658 region[FACE_RIGHT] = rect.left + rect.width;
8659 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008660 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008661 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008662 }
8663}
8664
8665/*===========================================================================
8666 * FUNCTION : convertFromRegions
8667 *
8668 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8669 *
8670 * PARAMETERS :
8671 * @roi : cam_area_t destination struct
8672 * @frame_settings : capture request settings to read the region from
8673 * @tag : metadata tag whose data is laid out as [xmin, ymin, xmax, ymax, weight]
8675 *
8676 *==========================================================================*/
8677void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008678 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008679{
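    // The framework region entry is laid out as [xmin, ymin, xmax, ymax, weight];
    // convert it to the HAL's left/top/width/height representation.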
Thierry Strudel3d639192016-09-09 11:52:26 -07008680 int32_t x_min = frame_settings.find(tag).data.i32[0];
8681 int32_t y_min = frame_settings.find(tag).data.i32[1];
8682 int32_t x_max = frame_settings.find(tag).data.i32[2];
8683 int32_t y_max = frame_settings.find(tag).data.i32[3];
8684 roi.weight = frame_settings.find(tag).data.i32[4];
8685 roi.rect.left = x_min;
8686 roi.rect.top = y_min;
8687 roi.rect.width = x_max - x_min;
8688 roi.rect.height = y_max - y_min;
8689}
8690
8691/*===========================================================================
8692 * FUNCTION : resetIfNeededROI
8693 *
8694 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8695 * crop region
8696 *
8697 * PARAMETERS :
8698 * @roi : cam_area_t struct to resize
8699 * @scalerCropRegion : cam_crop_region_t region to compare against
8700 *
8701 *
8702 *==========================================================================*/
8703bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8704 const cam_crop_region_t* scalerCropRegion)
8705{
8706 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8707 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8708 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8709 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8710
8711    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8712     * Without this check, the calculations below that validate whether the ROI lies
8713     * inside the scaler crop region would fail, the ROI would never be reset, and the
8714     * algorithm would keep using a stale ROI window.
8715     */
8716 if (roi->weight == 0) {
8717 return true;
8718 }
8719
8720 if ((roi_x_max < scalerCropRegion->left) ||
8721        // right edge of roi window is left of scaler crop's left edge
8722        (roi_y_max < scalerCropRegion->top) ||
8723        // bottom edge of roi window is above scaler crop's top edge
8724        (roi->rect.left > crop_x_max) ||
8725        // left edge of roi window is to the right of scaler crop's right edge
8726        (roi->rect.top > crop_y_max)){
8727            // top edge of roi window is below scaler crop's bottom edge
8728 return false;
8729 }
8730 if (roi->rect.left < scalerCropRegion->left) {
8731 roi->rect.left = scalerCropRegion->left;
8732 }
8733 if (roi->rect.top < scalerCropRegion->top) {
8734 roi->rect.top = scalerCropRegion->top;
8735 }
8736 if (roi_x_max > crop_x_max) {
8737 roi_x_max = crop_x_max;
8738 }
8739 if (roi_y_max > crop_y_max) {
8740 roi_y_max = crop_y_max;
8741 }
8742 roi->rect.width = roi_x_max - roi->rect.left;
8743 roi->rect.height = roi_y_max - roi->rect.top;
8744 return true;
8745}
8746
8747/*===========================================================================
8748 * FUNCTION : convertLandmarks
8749 *
8750 * DESCRIPTION: helper method to extract the landmarks from face detection info
8751 *
8752 * PARAMETERS :
8753 * @landmark_data : input landmark data to be converted
8754 * @landmarks : int32_t destination array
8755 *
8756 *
8757 *==========================================================================*/
8758void QCamera3HardwareInterface::convertLandmarks(
8759 cam_face_landmarks_info_t landmark_data,
8760 int32_t *landmarks)
8761{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008762 if (landmark_data.is_left_eye_valid) {
8763 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8764 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8765 } else {
8766 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8767 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8768 }
8769
8770 if (landmark_data.is_right_eye_valid) {
8771 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8772 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8773 } else {
8774 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8775 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8776 }
8777
8778 if (landmark_data.is_mouth_valid) {
8779 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8780 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8781 } else {
8782 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8783 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8784 }
8785}
8786
8787/*===========================================================================
8788 * FUNCTION : setInvalidLandmarks
8789 *
8790 * DESCRIPTION: helper method to set invalid landmarks
8791 *
8792 * PARAMETERS :
8793 * @landmarks : int32_t destination array
8794 *
8795 *
8796 *==========================================================================*/
8797void QCamera3HardwareInterface::setInvalidLandmarks(
8798 int32_t *landmarks)
8799{
8800 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8801 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8802 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8803 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8804 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8805 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008806}
8807
8808#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008809
8810/*===========================================================================
8811 * FUNCTION : getCapabilities
8812 *
8813 * DESCRIPTION: query camera capability from back-end
8814 *
8815 * PARAMETERS :
8816 * @ops : mm-interface ops structure
8817 * @cam_handle : camera handle for which we need capability
8818 *
8819 * RETURN : ptr type of capability structure
8820 * capability for success
8821 * NULL for failure
8822 *==========================================================================*/
8823cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8824 uint32_t cam_handle)
8825{
8826 int rc = NO_ERROR;
8827 QCamera3HeapMemory *capabilityHeap = NULL;
8828 cam_capability_t *cap_ptr = NULL;
8829
8830 if (ops == NULL) {
8831 LOGE("Invalid arguments");
8832 return NULL;
8833 }
8834
8835 capabilityHeap = new QCamera3HeapMemory(1);
8836 if (capabilityHeap == NULL) {
8837 LOGE("creation of capabilityHeap failed");
8838 return NULL;
8839 }
8840
8841 /* Allocate memory for capability buffer */
8842 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8843 if(rc != OK) {
8844        LOGE("No memory for capability");
8845 goto allocate_failed;
8846 }
8847
8848 /* Map memory for capability buffer */
8849 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8850
8851 rc = ops->map_buf(cam_handle,
8852 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8853 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8854 if(rc < 0) {
8855 LOGE("failed to map capability buffer");
8856 rc = FAILED_TRANSACTION;
8857 goto map_failed;
8858 }
8859
8860 /* Query Capability */
8861 rc = ops->query_capability(cam_handle);
8862 if(rc < 0) {
8863 LOGE("failed to query capability");
8864 rc = FAILED_TRANSACTION;
8865 goto query_failed;
8866 }
8867
8868 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8869 if (cap_ptr == NULL) {
8870 LOGE("out of memory");
8871 rc = NO_MEMORY;
8872 goto query_failed;
8873 }
8874
8875 memset(cap_ptr, 0, sizeof(cam_capability_t));
8876 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8877
8878 int index;
8879 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8880 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8881 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8882 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8883 }
8884
8885query_failed:
8886 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8887map_failed:
8888 capabilityHeap->deallocate();
8889allocate_failed:
8890 delete capabilityHeap;
8891
8892 if (rc != NO_ERROR) {
8893 return NULL;
8894 } else {
8895 return cap_ptr;
8896 }
8897}
8898
Thierry Strudel3d639192016-09-09 11:52:26 -07008899/*===========================================================================
8900 * FUNCTION : initCapabilities
8901 *
8902 * DESCRIPTION: initialize camera capabilities in static data struct
8903 *
8904 * PARAMETERS :
8905 * @cameraId : camera Id
8906 *
8907 * RETURN : int32_t type of status
8908 * NO_ERROR -- success
8909 *              non-zero failure code
8910 *==========================================================================*/
8911int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8912{
8913 int rc = 0;
8914 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008915 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008916
8917 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8918 if (rc) {
8919 LOGE("camera_open failed. rc = %d", rc);
8920 goto open_failed;
8921 }
8922 if (!cameraHandle) {
8923 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8924 goto open_failed;
8925 }
8926
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008927 handle = get_main_camera_handle(cameraHandle->camera_handle);
8928 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8929 if (gCamCapability[cameraId] == NULL) {
8930 rc = FAILED_TRANSACTION;
8931 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008932 }
8933
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008934 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008935 if (is_dual_camera_by_idx(cameraId)) {
8936 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8937 gCamCapability[cameraId]->aux_cam_cap =
8938 getCapabilities(cameraHandle->ops, handle);
8939 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8940 rc = FAILED_TRANSACTION;
8941 free(gCamCapability[cameraId]);
8942 goto failed_op;
8943 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008944
8945 // Copy the main camera capability to main_cam_cap struct
8946 gCamCapability[cameraId]->main_cam_cap =
8947 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8948 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8949 LOGE("out of memory");
8950 rc = NO_MEMORY;
8951 goto failed_op;
8952 }
8953 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8954 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008955 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008956failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008957 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8958 cameraHandle = NULL;
8959open_failed:
8960 return rc;
8961}
8962
8963/*==========================================================================
8964 * FUNCTION : get3AVersion
8965 *
8966 * DESCRIPTION: get the Q3A S/W version
8967 *
8968 * PARAMETERS :
8969 * @sw_version: Reference of Q3A structure which will hold version info upon
8970 * return
8971 *
8972 * RETURN : None
8973 *
8974 *==========================================================================*/
8975void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8976{
8977 if(gCamCapability[mCameraId])
8978 sw_version = gCamCapability[mCameraId]->q3a_version;
8979 else
8980 LOGE("Capability structure NULL!");
8981}
8982
8983
8984/*===========================================================================
8985 * FUNCTION : initParameters
8986 *
8987 * DESCRIPTION: initialize camera parameters
8988 *
8989 * PARAMETERS :
8990 *
8991 * RETURN : int32_t type of status
8992 * NO_ERROR -- success
8993 * non-zero failure code
8994 *==========================================================================*/
8995int QCamera3HardwareInterface::initParameters()
8996{
8997 int rc = 0;
8998
8999 //Allocate Set Param Buffer
9000 mParamHeap = new QCamera3HeapMemory(1);
9001 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9002 if(rc != OK) {
9003 rc = NO_MEMORY;
9004 LOGE("Failed to allocate SETPARM Heap memory");
9005 delete mParamHeap;
9006 mParamHeap = NULL;
9007 return rc;
9008 }
9009
9010 //Map memory for parameters buffer
9011 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9012 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9013 mParamHeap->getFd(0),
9014 sizeof(metadata_buffer_t),
9015 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9016 if(rc < 0) {
9017 LOGE("failed to map SETPARM buffer");
9018 rc = FAILED_TRANSACTION;
9019 mParamHeap->deallocate();
9020 delete mParamHeap;
9021 mParamHeap = NULL;
9022 return rc;
9023 }
9024
9025 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9026
9027 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9028 return rc;
9029}
9030
9031/*===========================================================================
9032 * FUNCTION : deinitParameters
9033 *
9034 * DESCRIPTION: de-initialize camera parameters
9035 *
9036 * PARAMETERS :
9037 *
9038 * RETURN : NONE
9039 *==========================================================================*/
9040void QCamera3HardwareInterface::deinitParameters()
9041{
9042 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9043 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9044
9045 mParamHeap->deallocate();
9046 delete mParamHeap;
9047 mParamHeap = NULL;
9048
9049 mParameters = NULL;
9050
9051 free(mPrevParameters);
9052 mPrevParameters = NULL;
9053}
9054
9055/*===========================================================================
9056 * FUNCTION : calcMaxJpegSize
9057 *
9058 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9059 *
9060 * PARAMETERS :
9061 *
9062 * RETURN : max_jpeg_size
9063 *==========================================================================*/
9064size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9065{
9066 size_t max_jpeg_size = 0;
9067 size_t temp_width, temp_height;
9068 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9069 MAX_SIZES_CNT);
9070 for (size_t i = 0; i < count; i++) {
9071 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9072 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9073 if (temp_width * temp_height > max_jpeg_size ) {
9074 max_jpeg_size = temp_width * temp_height;
9075 }
9076 }
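    // Heuristic worst-case bound: assume the encoded JPEG never exceeds 1.5x the
    // pixel count of the largest picture size, then add room for the
    // camera3_jpeg_blob_t transport header appended at the end of the buffer.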
9077 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9078 return max_jpeg_size;
9079}
9080
9081/*===========================================================================
9082 * FUNCTION : getMaxRawSize
9083 *
9084 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9085 *
9086 * PARAMETERS :
9087 *
9088 * RETURN : Largest supported Raw Dimension
9089 *==========================================================================*/
9090cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9091{
9092 int max_width = 0;
9093 cam_dimension_t maxRawSize;
9094
9095 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9096 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9097 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9098 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9099 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9100 }
9101 }
9102 return maxRawSize;
9103}
9104
9105
9106/*===========================================================================
9107 * FUNCTION : calcMaxJpegDim
9108 *
9109 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9110 *
9111 * PARAMETERS :
9112 *
9113 * RETURN : max_jpeg_dim
9114 *==========================================================================*/
9115cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9116{
9117 cam_dimension_t max_jpeg_dim;
9118 cam_dimension_t curr_jpeg_dim;
9119 max_jpeg_dim.width = 0;
9120 max_jpeg_dim.height = 0;
9121 curr_jpeg_dim.width = 0;
9122 curr_jpeg_dim.height = 0;
9123 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9124 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9125 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9126 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9127 max_jpeg_dim.width * max_jpeg_dim.height ) {
9128 max_jpeg_dim.width = curr_jpeg_dim.width;
9129 max_jpeg_dim.height = curr_jpeg_dim.height;
9130 }
9131 }
9132 return max_jpeg_dim;
9133}
9134
9135/*===========================================================================
9136 * FUNCTION : addStreamConfig
9137 *
9138 * DESCRIPTION: adds the stream configuration to the array
9139 *
9140 * PARAMETERS :
9141 * @available_stream_configs : pointer to stream configuration array
9142 * @scalar_format : scalar format
9143 * @dim : configuration dimension
9144 * @config_type : input or output configuration type
9145 *
9146 * RETURN : NONE
9147 *==========================================================================*/
9148void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9149 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9150{
9151 available_stream_configs.add(scalar_format);
9152 available_stream_configs.add(dim.width);
9153 available_stream_configs.add(dim.height);
9154 available_stream_configs.add(config_type);
9155}
9156
9157/*===========================================================================
9158 * FUNCTION : supportBurstCapture
9159 *
9160 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9161 *
9162 * PARAMETERS :
9163 * @cameraId : camera Id
9164 *
9165 * RETURN : true if camera supports BURST_CAPTURE
9166 * false otherwise
9167 *==========================================================================*/
9168bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9169{
9170 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9171 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9172 const int32_t highResWidth = 3264;
9173 const int32_t highResHeight = 2448;
9174
9175 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9176 // Maximum resolution images cannot be captured at >= 10fps
9177 // -> not supporting BURST_CAPTURE
9178 return false;
9179 }
9180
9181 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9182 // Maximum resolution images can be captured at >= 20fps
9183 // --> supporting BURST_CAPTURE
9184 return true;
9185 }
9186
9187 // Find the smallest highRes resolution, or largest resolution if there is none
9188 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9189 MAX_SIZES_CNT);
9190 size_t highRes = 0;
9191 while ((highRes + 1 < totalCnt) &&
9192 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9193 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9194 highResWidth * highResHeight)) {
9195 highRes++;
9196 }
9197 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
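    // At this point highRes indexes the smallest picture size that is still at
    // least 3264x2448 (or the largest size if none qualifies); burst capture is
    // advertised only if that size meets the 20 fps bound checked below.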
9198 return true;
9199 } else {
9200 return false;
9201 }
9202}
9203
9204/*===========================================================================
9205 * FUNCTION : getPDStatIndex
9206 *
9207 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9208 *
9209 * PARAMETERS :
9210 * @caps : camera capabilities
9211 *
9212 * RETURN : int32_t type
9213 * non-negative - on success
9214 * -1 - on failure
9215 *==========================================================================*/
9216int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9217 if (nullptr == caps) {
9218 return -1;
9219 }
9220
9221 uint32_t metaRawCount = caps->meta_raw_channel_count;
9222 int32_t ret = -1;
9223 for (size_t i = 0; i < metaRawCount; i++) {
9224 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9225 ret = i;
9226 break;
9227 }
9228 }
9229
9230 return ret;
9231}
9232
9233/*===========================================================================
9234 * FUNCTION : initStaticMetadata
9235 *
9236 * DESCRIPTION: initialize the static metadata
9237 *
9238 * PARAMETERS :
9239 * @cameraId : camera Id
9240 *
9241 * RETURN : int32_t type of status
9242 * 0 -- success
9243 * non-zero failure code
9244 *==========================================================================*/
9245int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9246{
9247 int rc = 0;
9248 CameraMetadata staticInfo;
9249 size_t count = 0;
9250 bool limitedDevice = false;
9251 char prop[PROPERTY_VALUE_MAX];
9252 bool supportBurst = false;
9253
9254 supportBurst = supportBurstCapture(cameraId);
9255
9256 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9257 * guaranteed or if min fps of max resolution is less than 20 fps, it is
9258 * advertised as a limited device */
9259 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9260 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9261 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9262 !supportBurst;
9263
9264 uint8_t supportedHwLvl = limitedDevice ?
9265 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9266#ifndef USE_HAL_3_3
9267 // LEVEL_3 - This device will support level 3.
9268 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9269#else
9270 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9271#endif
9272
9273 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9274 &supportedHwLvl, 1);
9275
9276 bool facingBack = false;
9277 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9278 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9279 facingBack = true;
9280 }
9281 /*HAL 3 only*/
9282 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9283 &gCamCapability[cameraId]->min_focus_distance, 1);
9284
9285 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9286 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9287
9288 /*should be using focal lengths but sensor doesn't provide that info now*/
9289 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9290 &gCamCapability[cameraId]->focal_length,
9291 1);
9292
9293 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9294 gCamCapability[cameraId]->apertures,
9295 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9296
9297 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9298 gCamCapability[cameraId]->filter_densities,
9299 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9300
9301
9302 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9303 size_t mode_count =
9304 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9305 for (size_t i = 0; i < mode_count; i++) {
9306 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9307 }
9308 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9309 available_opt_stab_modes, mode_count);
9310
9311 int32_t lens_shading_map_size[] = {
9312 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9313 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9314 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9315 lens_shading_map_size,
9316 sizeof(lens_shading_map_size)/sizeof(int32_t));
9317
9318 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9319 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9320
9321 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9322 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9323
9324 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9325 &gCamCapability[cameraId]->max_frame_duration, 1);
9326
9327 camera_metadata_rational baseGainFactor = {
9328 gCamCapability[cameraId]->base_gain_factor.numerator,
9329 gCamCapability[cameraId]->base_gain_factor.denominator};
9330 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9331 &baseGainFactor, 1);
9332
9333 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9334 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9335
9336 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9337 gCamCapability[cameraId]->pixel_array_size.height};
9338 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9339 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9340
9341 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9342 gCamCapability[cameraId]->active_array_size.top,
9343 gCamCapability[cameraId]->active_array_size.width,
9344 gCamCapability[cameraId]->active_array_size.height};
9345 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9346 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9347
9348 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9349 &gCamCapability[cameraId]->white_level, 1);
9350
9351 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9352 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9353 gCamCapability[cameraId]->color_arrangement);
9354 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9355 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9356
9357#ifndef USE_HAL_3_3
9358 bool hasBlackRegions = false;
9359 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9360 LOGW("black_region_count: %d is bounded to %d",
9361 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9362 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9363 }
9364 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9365 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9366 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9367 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9368 }
9369 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9370 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9371 hasBlackRegions = true;
9372 }
9373#endif
9374 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9375 &gCamCapability[cameraId]->flash_charge_duration, 1);
9376
9377 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9378 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9379
9380 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9381 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9382 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9383 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9384 &timestampSource, 1);
9385
9386 //update histogram vendor data
9387 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9388 &gCamCapability[cameraId]->histogram_size, 1);
9389
9390 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9391 &gCamCapability[cameraId]->max_histogram_count, 1);
9392
9393 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9394 //so that app can request fewer number of bins than the maximum supported.
9395 std::vector<int32_t> histBins;
9396 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9397 histBins.push_back(maxHistBins);
9398 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9399 (maxHistBins & 0x1) == 0) {
9400 histBins.push_back(maxHistBins >> 1);
9401 maxHistBins >>= 1;
9402 }
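    // Example with hypothetical values: if max_histogram_count were 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE were 64, the advertised bin counts would be
    // {256, 128, 64}.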
9403 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9404 histBins.data(), histBins.size());
9405
9406 int32_t sharpness_map_size[] = {
9407 gCamCapability[cameraId]->sharpness_map_size.width,
9408 gCamCapability[cameraId]->sharpness_map_size.height};
9409
9410 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9411 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9412
9413 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9414 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9415
9416 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9417 if (0 <= indexPD) {
9418 // Advertise PD stats data as part of the Depth capabilities
9419 int32_t depthWidth =
9420 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9421 int32_t depthHeight =
9422 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9423 int32_t depthStride =
9424 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
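    // The PD meta buffer is RAW16 (2 bytes per pixel), hence the stride of
    // width * 2 above; the sample count below assumes each phase-detection
    // sample occupies 16 bytes of that buffer.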
9425 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9426 assert(0 < depthSamplesCount);
9427 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9428 &depthSamplesCount, 1);
9429
9430 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9431 depthHeight,
9432 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9433 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9434 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9435 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9436 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9437
9438 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9439 depthHeight, 33333333,
9440 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9441 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9442 depthMinDuration,
9443 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9444
9445 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9446 depthHeight, 0,
9447 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9448 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9449 depthStallDuration,
9450 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9451
9452 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9453 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9454
9455 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9456 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9457 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
9458 }
9459
9460 int32_t scalar_formats[] = {
9461 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9462 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9463 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9464 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9465 HAL_PIXEL_FORMAT_RAW10,
9466 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9467 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9468 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9469 scalar_formats_count);
9470
9471 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9472 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9473 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9474 count, MAX_SIZES_CNT, available_processed_sizes);
9475 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9476 available_processed_sizes, count * 2);
9477
9478 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9479 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9480 makeTable(gCamCapability[cameraId]->raw_dim,
9481 count, MAX_SIZES_CNT, available_raw_sizes);
9482 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9483 available_raw_sizes, count * 2);
9484
9485 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9486 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9487 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9488 count, MAX_SIZES_CNT, available_fps_ranges);
9489 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9490 available_fps_ranges, count * 2);
9491
9492 camera_metadata_rational exposureCompensationStep = {
9493 gCamCapability[cameraId]->exp_compensation_step.numerator,
9494 gCamCapability[cameraId]->exp_compensation_step.denominator};
9495 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9496 &exposureCompensationStep, 1);
9497
9498 Vector<uint8_t> availableVstabModes;
9499 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9500 char eis_prop[PROPERTY_VALUE_MAX];
9501 bool eisSupported = false;
9502 memset(eis_prop, 0, sizeof(eis_prop));
9503 property_get("persist.camera.eis.enable", eis_prop, "1");
9504 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9505 count = IS_TYPE_MAX;
9506 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9507 for (size_t i = 0; i < count; i++) {
9508 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9509 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9510 eisSupported = true;
9511 break;
9512 }
9513 }
9514 if (facingBack && eis_prop_set && eisSupported) {
9515 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9516 }
9517 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9518 availableVstabModes.array(), availableVstabModes.size());
9519
9520 /*HAL 1 and HAL 3 common*/
9521 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9522 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9523 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
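    // zoom_ratio_tbl entries appear to be expressed in percent (100 == 1x, per
    // the HAL1/API1 note above), so the last entry divided by 100 gives the
    // maximum digital zoom factor before capping.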
9524 // Cap the max zoom to the max preferred value
9525 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9526 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9527 &maxZoom, 1);
9528
9529 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9530 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9531
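    // ANDROID_CONTROL_MAX_REGIONS is ordered (AE, AWB, AF): AWB metering regions
    // are not supported, and AF regions are dropped below when only a single
    // (typically fixed-focus) focus mode is reported.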
9532 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9533 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9534 max3aRegions[2] = 0; /* AF not supported */
9535 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9536 max3aRegions, 3);
9537
9538 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9539 memset(prop, 0, sizeof(prop));
9540 property_get("persist.camera.facedetect", prop, "1");
9541 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9542 LOGD("Support face detection mode: %d",
9543 supportedFaceDetectMode);
9544
9545 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9546 /* supported face detect mode should be OFF if the max number of faces is 0 */
9547 if (maxFaces <= 0) {
9548 supportedFaceDetectMode = 0;
9549 }
9550 Vector<uint8_t> availableFaceDetectModes;
9551 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9552 if (supportedFaceDetectMode == 1) {
9553 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9554 } else if (supportedFaceDetectMode == 2) {
9555 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9556 } else if (supportedFaceDetectMode == 3) {
9557 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9558 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9559 } else {
9560 maxFaces = 0;
9561 }
9562 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9563 availableFaceDetectModes.array(),
9564 availableFaceDetectModes.size());
9565 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9566 (int32_t *)&maxFaces, 1);
9567 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9568 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9569 &face_bsgc, 1);
9570
9571 int32_t exposureCompensationRange[] = {
9572 gCamCapability[cameraId]->exposure_compensation_min,
9573 gCamCapability[cameraId]->exposure_compensation_max};
9574 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9575 exposureCompensationRange,
9576 sizeof(exposureCompensationRange)/sizeof(int32_t));
9577
9578 uint8_t lensFacing = (facingBack) ?
9579 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9580 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9581
9582 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9583 available_thumbnail_sizes,
9584 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9585
9586 /*all sizes will be clubbed into this tag*/
9587 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9588 /*android.scaler.availableStreamConfigurations*/
9589 Vector<int32_t> available_stream_configs;
9590 cam_dimension_t active_array_dim;
9591 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9592 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9593
9594 /*advertise list of input dimensions supported based on below property.
9595 By default all sizes up to 5MP will be advertised.
9596 Note that the setprop resolution format should be WxH.
9597 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9598 To list all supported sizes, setprop needs to be set with "0x0" */
9599 cam_dimension_t minInputSize = {2592,1944}; //5MP
9600 memset(prop, 0, sizeof(prop));
9601 property_get("persist.camera.input.minsize", prop, "2592x1944");
9602 if (strlen(prop) > 0) {
9603 char *saveptr = NULL;
9604 char *token = strtok_r(prop, "x", &saveptr);
9605 if (token != NULL) {
9606 minInputSize.width = atoi(token);
9607 }
9608 token = strtok_r(NULL, "x", &saveptr);
9609 if (token != NULL) {
9610 minInputSize.height = atoi(token);
9611 }
9612 }
9613
9614 /* Add input/output stream configurations for each scalar format */
9615 for (size_t j = 0; j < scalar_formats_count; j++) {
9616 switch (scalar_formats[j]) {
9617 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9618 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9619 case HAL_PIXEL_FORMAT_RAW10:
9620 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9621 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9622 addStreamConfig(available_stream_configs, scalar_formats[j],
9623 gCamCapability[cameraId]->raw_dim[i],
9624 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9625 }
9626 break;
9627 case HAL_PIXEL_FORMAT_BLOB:
9628 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9629 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9630 addStreamConfig(available_stream_configs, scalar_formats[j],
9631 gCamCapability[cameraId]->picture_sizes_tbl[i],
9632 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9633 }
9634 break;
9635 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9636 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9637 default:
9638 cam_dimension_t largest_picture_size;
9639 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9640 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9641 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9642 addStreamConfig(available_stream_configs, scalar_formats[j],
9643 gCamCapability[cameraId]->picture_sizes_tbl[i],
9644 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9645 /* For the two formats below we also support input streams for reprocessing; advertise those */
9646 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9647 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
9648 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9649 >= minInputSize.width) || (gCamCapability[cameraId]->
9650 picture_sizes_tbl[i].height >= minInputSize.height)) {
9651 addStreamConfig(available_stream_configs, scalar_formats[j],
9652 gCamCapability[cameraId]->picture_sizes_tbl[i],
9653 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9654 }
9655 }
9656 }
9657
9658 break;
9659 }
9660 }
9661
9662 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9663 available_stream_configs.array(), available_stream_configs.size());
9664 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9665 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9666
9667 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9668 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9669
9670 /* android.scaler.availableMinFrameDurations */
9671 Vector<int64_t> available_min_durations;
9672 for (size_t j = 0; j < scalar_formats_count; j++) {
9673 switch (scalar_formats[j]) {
9674 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9675 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9676 case HAL_PIXEL_FORMAT_RAW10:
9677 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9678 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9679 available_min_durations.add(scalar_formats[j]);
9680 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9681 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9682 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9683 }
9684 break;
9685 default:
9686 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9687 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9688 available_min_durations.add(scalar_formats[j]);
9689 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9690 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9691 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9692 }
9693 break;
9694 }
9695 }
9696 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9697 available_min_durations.array(), available_min_durations.size());
9698
9699 Vector<int32_t> available_hfr_configs;
9700 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9701 int32_t fps = 0;
9702 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9703 case CAM_HFR_MODE_60FPS:
9704 fps = 60;
9705 break;
9706 case CAM_HFR_MODE_90FPS:
9707 fps = 90;
9708 break;
9709 case CAM_HFR_MODE_120FPS:
9710 fps = 120;
9711 break;
9712 case CAM_HFR_MODE_150FPS:
9713 fps = 150;
9714 break;
9715 case CAM_HFR_MODE_180FPS:
9716 fps = 180;
9717 break;
9718 case CAM_HFR_MODE_210FPS:
9719 fps = 210;
9720 break;
9721 case CAM_HFR_MODE_240FPS:
9722 fps = 240;
9723 break;
9724 case CAM_HFR_MODE_480FPS:
9725 fps = 480;
9726 break;
9727 case CAM_HFR_MODE_OFF:
9728 case CAM_HFR_MODE_MAX:
9729 default:
9730 break;
9731 }
9732
9733 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9734 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9735 /* For each HFR frame rate, need to advertise one variable fps range
9736 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9737 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9738 * set by the app. When video recording is started, [120, 120] is
9739 * set. This way sensor configuration does not change when recording
9740 * is started */
9741
9742 /* (width, height, fps_min, fps_max, batch_size_max) */
9743 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9744 j < MAX_SIZES_CNT; j++) {
9745 available_hfr_configs.add(
9746 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9747 available_hfr_configs.add(
9748 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9749 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9750 available_hfr_configs.add(fps);
9751 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9752
9753 /* (width, height, fps_min, fps_max, batch_size_max) */
9754 available_hfr_configs.add(
9755 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9756 available_hfr_configs.add(
9757 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9758 available_hfr_configs.add(fps);
9759 available_hfr_configs.add(fps);
9760 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9761 }
9762 }
9763 }
9764 //Advertise HFR capability only if the property is set
9765 memset(prop, 0, sizeof(prop));
9766 property_get("persist.camera.hal3hfr.enable", prop, "1");
9767 uint8_t hfrEnable = (uint8_t)atoi(prop);
9768
9769 if(hfrEnable && available_hfr_configs.array()) {
9770 staticInfo.update(
9771 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9772 available_hfr_configs.array(), available_hfr_configs.size());
9773 }
9774
9775 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9776 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9777 &max_jpeg_size, 1);
9778
9779 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9780 size_t size = 0;
9781 count = CAM_EFFECT_MODE_MAX;
9782 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9783 for (size_t i = 0; i < count; i++) {
9784 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9785 gCamCapability[cameraId]->supported_effects[i]);
9786 if (NAME_NOT_FOUND != val) {
9787 avail_effects[size] = (uint8_t)val;
9788 size++;
9789 }
9790 }
9791 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9792 avail_effects,
9793 size);
9794
9795 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9796 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9797 size_t supported_scene_modes_cnt = 0;
9798 count = CAM_SCENE_MODE_MAX;
9799 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9800 for (size_t i = 0; i < count; i++) {
9801 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9802 CAM_SCENE_MODE_OFF) {
9803 int val = lookupFwkName(SCENE_MODES_MAP,
9804 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9805 gCamCapability[cameraId]->supported_scene_modes[i]);
9806
9807 if (NAME_NOT_FOUND != val) {
9808 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9809 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9810 supported_scene_modes_cnt++;
9811 }
9812 }
9813 }
9814 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9815 avail_scene_modes,
9816 supported_scene_modes_cnt);
9817
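    // Each advertised scene mode contributes a 3-tuple of override values
    // (AE mode, AWB mode, AF mode), hence the *3 sizing of the table below.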
9818 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9819 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9820 supported_scene_modes_cnt,
9821 CAM_SCENE_MODE_MAX,
9822 scene_mode_overrides,
9823 supported_indexes,
9824 cameraId);
9825
9826 if (supported_scene_modes_cnt == 0) {
9827 supported_scene_modes_cnt = 1;
9828 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9829 }
9830
9831 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9832 scene_mode_overrides, supported_scene_modes_cnt * 3);
9833
9834 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9835 ANDROID_CONTROL_MODE_AUTO,
9836 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9837 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9838 available_control_modes,
9839 3);
9840
9841 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9842 size = 0;
9843 count = CAM_ANTIBANDING_MODE_MAX;
9844 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9845 for (size_t i = 0; i < count; i++) {
9846 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9847 gCamCapability[cameraId]->supported_antibandings[i]);
9848 if (NAME_NOT_FOUND != val) {
9849 avail_antibanding_modes[size] = (uint8_t)val;
9850 size++;
9851 }
9852
9853 }
9854 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9855 avail_antibanding_modes,
9856 size);
9857
9858 uint8_t avail_abberation_modes[] = {
9859 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9860 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9861 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9862 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9863 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9864 if (0 == count) {
9865 // If no aberration correction modes are available for a device, advertise only the OFF mode
9866 size = 1;
9867 } else {
9868 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9869 // so advertise all 3 modes if at least one mode is supported, as per the
9870 // new M requirement
9871 size = 3;
9872 }
9873 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9874 avail_abberation_modes,
9875 size);
9876
9877 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9878 size = 0;
9879 count = CAM_FOCUS_MODE_MAX;
9880 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9881 for (size_t i = 0; i < count; i++) {
9882 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9883 gCamCapability[cameraId]->supported_focus_modes[i]);
9884 if (NAME_NOT_FOUND != val) {
9885 avail_af_modes[size] = (uint8_t)val;
9886 size++;
9887 }
9888 }
9889 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9890 avail_af_modes,
9891 size);
9892
9893 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9894 size = 0;
9895 count = CAM_WB_MODE_MAX;
9896 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9897 for (size_t i = 0; i < count; i++) {
9898 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9899 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9900 gCamCapability[cameraId]->supported_white_balances[i]);
9901 if (NAME_NOT_FOUND != val) {
9902 avail_awb_modes[size] = (uint8_t)val;
9903 size++;
9904 }
9905 }
9906 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9907 avail_awb_modes,
9908 size);
9909
9910 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9911 count = CAM_FLASH_FIRING_LEVEL_MAX;
9912 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9913 count);
9914 for (size_t i = 0; i < count; i++) {
9915 available_flash_levels[i] =
9916 gCamCapability[cameraId]->supported_firing_levels[i];
9917 }
9918 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9919 available_flash_levels, count);
9920
9921 uint8_t flashAvailable;
9922 if (gCamCapability[cameraId]->flash_available)
9923 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9924 else
9925 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9926 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9927 &flashAvailable, 1);
9928
9929 Vector<uint8_t> avail_ae_modes;
9930 count = CAM_AE_MODE_MAX;
9931 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9932 for (size_t i = 0; i < count; i++) {
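        // CAM_AE_MODE_ON_EXTERNAL_FLASH has no standard ANDROID_CONTROL_AE_MODE_*
        // constant, so it is remapped to the experimental vendor value before
        // being advertised.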
9933 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9934 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9935 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9936 }
9937 avail_ae_modes.add(aeMode);
9938 }
9939 if (flashAvailable) {
9940 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9941 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9942 }
9943 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9944 avail_ae_modes.array(),
9945 avail_ae_modes.size());
9946
9947 int32_t sensitivity_range[2];
9948 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9949 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9950 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9951 sensitivity_range,
9952 sizeof(sensitivity_range) / sizeof(int32_t));
9953
9954 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9955 &gCamCapability[cameraId]->max_analog_sensitivity,
9956 1);
9957
9958 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9959 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9960 &sensor_orientation,
9961 1);
9962
9963 int32_t max_output_streams[] = {
9964 MAX_STALLING_STREAMS,
9965 MAX_PROCESSED_STREAMS,
9966 MAX_RAW_STREAMS};
9967 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9968 max_output_streams,
9969 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9970
9971 uint8_t avail_leds = 0;
9972 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9973 &avail_leds, 0);
9974
9975 uint8_t focus_dist_calibrated;
9976 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9977 gCamCapability[cameraId]->focus_dist_calibrated);
9978 if (NAME_NOT_FOUND != val) {
9979 focus_dist_calibrated = (uint8_t)val;
9980 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9981 &focus_dist_calibrated, 1);
9982 }
9983
9984 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9985 size = 0;
9986 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9987 MAX_TEST_PATTERN_CNT);
9988 for (size_t i = 0; i < count; i++) {
9989 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9990 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9991 if (NAME_NOT_FOUND != testpatternMode) {
9992 avail_testpattern_modes[size] = testpatternMode;
9993 size++;
9994 }
9995 }
9996 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9997 avail_testpattern_modes,
9998 size);
9999
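    // Advertised pipeline depth: the maximum number of in-flight requests plus
    // the empty-pipeline and frame-skip delays added by this HAL.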
10000 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10001 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10002 &max_pipeline_depth,
10003 1);
10004
10005 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10006 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10007 &partial_result_count,
10008 1);
10009
10010 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10011 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10012
10013 Vector<uint8_t> available_capabilities;
10014 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10015 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10016 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10017 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10018 if (supportBurst) {
10019 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10020 }
10021 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10022 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10023 if (hfrEnable && available_hfr_configs.array()) {
10024 available_capabilities.add(
10025 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10026 }
10027
10028 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10029 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10030 }
10031 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10032 available_capabilities.array(),
10033 available_capabilities.size());
10034
10035 //aeLockAvailable to be set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE
10036 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10037 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10038 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10039
10040 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10041 &aeLockAvailable, 1);
10042
10043 //awbLockAvailable to be set to true if capabilities include MANUAL_POST_PROCESSING or
10044 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10045 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10046 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10047
10048 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10049 &awbLockAvailable, 1);
10050
10051 int32_t max_input_streams = 1;
10052 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10053 &max_input_streams,
10054 1);
10055
10056 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10057 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10058 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10059 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10060 HAL_PIXEL_FORMAT_YCbCr_420_888};
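    // Read as: both IMPLEMENTATION_DEFINED and YCbCr_420_888 inputs can be
    // reprocessed into BLOB (JPEG) or YCbCr_420_888 outputs.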
10061 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10062 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10063
10064 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10065 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10066 &max_latency,
10067 1);
10068
10069#ifndef USE_HAL_3_3
10070 int32_t isp_sensitivity_range[2];
10071 isp_sensitivity_range[0] =
10072 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10073 isp_sensitivity_range[1] =
10074 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10075 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10076 isp_sensitivity_range,
10077 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10078#endif
10079
10080 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10081 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10082 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10083 available_hot_pixel_modes,
10084 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10085
10086 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10087 ANDROID_SHADING_MODE_FAST,
10088 ANDROID_SHADING_MODE_HIGH_QUALITY};
10089 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10090 available_shading_modes,
10091 3);
10092
10093 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10094 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10095 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10096 available_lens_shading_map_modes,
10097 2);
10098
10099 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10100 ANDROID_EDGE_MODE_FAST,
10101 ANDROID_EDGE_MODE_HIGH_QUALITY,
10102 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10103 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10104 available_edge_modes,
10105 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10106
10107 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10108 ANDROID_NOISE_REDUCTION_MODE_FAST,
10109 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10110 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10111 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10112 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10113 available_noise_red_modes,
10114 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10115
10116 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10117 ANDROID_TONEMAP_MODE_FAST,
10118 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10119 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10120 available_tonemap_modes,
10121 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10122
10123 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10124 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10125 available_hot_pixel_map_modes,
10126 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10127
10128 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10129 gCamCapability[cameraId]->reference_illuminant1);
10130 if (NAME_NOT_FOUND != val) {
10131 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10132 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10133 }
10134
10135 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10136 gCamCapability[cameraId]->reference_illuminant2);
10137 if (NAME_NOT_FOUND != val) {
10138 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10139 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10140 }
10141
10142 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10143 (void *)gCamCapability[cameraId]->forward_matrix1,
10144 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10145
10146 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10147 (void *)gCamCapability[cameraId]->forward_matrix2,
10148 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10149
10150 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10151 (void *)gCamCapability[cameraId]->color_transform1,
10152 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10153
10154 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10155 (void *)gCamCapability[cameraId]->color_transform2,
10156 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10157
10158 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10159 (void *)gCamCapability[cameraId]->calibration_transform1,
10160 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10161
10162 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10163 (void *)gCamCapability[cameraId]->calibration_transform2,
10164 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10165
10166 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10167 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10168 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10169 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10170 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10171 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10172 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10173 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10174 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10175 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10176 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10177 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10178 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10179 ANDROID_JPEG_GPS_COORDINATES,
10180 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10181 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10182 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10183 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10184 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10185 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10186 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10187 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10188 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10189 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
10190#ifndef USE_HAL_3_3
10191 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10192#endif
10193 ANDROID_STATISTICS_FACE_DETECT_MODE,
10194 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10195 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10196 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10197 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10198 /* DevCamDebug metadata request_keys_basic */
10199 DEVCAMDEBUG_META_ENABLE,
10200 /* DevCamDebug metadata end */
10201 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10202 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10203 TANGO_MODE_DATA_SENSOR_FULLFOV,
10204 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
10205 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
10206 };
10207
10208 size_t request_keys_cnt =
10209 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10210 Vector<int32_t> available_request_keys;
10211 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10212 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10213 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10214 }
10215
10216 if (gExposeEnableZslKey) {
10217 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10218 }
10219
10220 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10221 available_request_keys.array(), available_request_keys.size());
10222
10223 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10224 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10225 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10226 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10227 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10228 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10229 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10230 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10231 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10232 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10233 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10234 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10235 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10236 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10237 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10238 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10239 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10240 ANDROID_STATISTICS_FACE_DETECT_MODE,
10241 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10242 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10243 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010244 ANDROID_STATISTICS_FACE_SCORES,
10245#ifndef USE_HAL_3_3
10246 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10247#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010248 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010249 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010250 // DevCamDebug metadata result_keys_basic
10251 DEVCAMDEBUG_META_ENABLE,
10252 // DevCamDebug metadata result_keys AF
10253 DEVCAMDEBUG_AF_LENS_POSITION,
10254 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10255 DEVCAMDEBUG_AF_TOF_DISTANCE,
10256 DEVCAMDEBUG_AF_LUMA,
10257 DEVCAMDEBUG_AF_HAF_STATE,
10258 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10259 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10260 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10261 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10262 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10263 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10264 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10265 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10266 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10267 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10268 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10269 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10270 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10271 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10272 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10273 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10274 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10275 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10276 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10277 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10278 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10279 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10280 // DevCamDebug metadata result_keys AEC
10281 DEVCAMDEBUG_AEC_TARGET_LUMA,
10282 DEVCAMDEBUG_AEC_COMP_LUMA,
10283 DEVCAMDEBUG_AEC_AVG_LUMA,
10284 DEVCAMDEBUG_AEC_CUR_LUMA,
10285 DEVCAMDEBUG_AEC_LINECOUNT,
10286 DEVCAMDEBUG_AEC_REAL_GAIN,
10287 DEVCAMDEBUG_AEC_EXP_INDEX,
10288 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010289 // DevCamDebug metadata result_keys zzHDR
10290 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10291 DEVCAMDEBUG_AEC_L_LINECOUNT,
10292 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10293 DEVCAMDEBUG_AEC_S_LINECOUNT,
10294 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10295 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10296 // DevCamDebug metadata result_keys ADRC
10297 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10298 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10299 DEVCAMDEBUG_AEC_GTM_RATIO,
10300 DEVCAMDEBUG_AEC_LTM_RATIO,
10301 DEVCAMDEBUG_AEC_LA_RATIO,
10302 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010303 // DevCamDebug metadata result_keys AEC MOTION
10304 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10305 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10306 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010307 // DevCamDebug metadata result_keys AWB
10308 DEVCAMDEBUG_AWB_R_GAIN,
10309 DEVCAMDEBUG_AWB_G_GAIN,
10310 DEVCAMDEBUG_AWB_B_GAIN,
10311 DEVCAMDEBUG_AWB_CCT,
10312 DEVCAMDEBUG_AWB_DECISION,
10313 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010314 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10315 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10316 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010317 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010318 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010319 };
10320
Thierry Strudel3d639192016-09-09 11:52:26 -070010321 size_t result_keys_cnt =
10322 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10323
10324 Vector<int32_t> available_result_keys;
10325 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10326 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10327 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10328 }
10329 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10330 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10331 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10332 }
10333 if (supportedFaceDetectMode == 1) {
10334 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10335 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10336 } else if ((supportedFaceDetectMode == 2) ||
10337 (supportedFaceDetectMode == 3)) {
10338 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10339 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10340 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010341#ifndef USE_HAL_3_3
10342 if (hasBlackRegions) {
10343 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10344 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10345 }
10346#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010347
10348 if (gExposeEnableZslKey) {
10349 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10350 }
10351
Thierry Strudel3d639192016-09-09 11:52:26 -070010352 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10353 available_result_keys.array(), available_result_keys.size());
10354
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010355 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010356 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10357 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10358 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10359 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10360 ANDROID_SCALER_CROPPING_TYPE,
10361 ANDROID_SYNC_MAX_LATENCY,
10362 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10363 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10364 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10365 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10366 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10367 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10368 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10369 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10370 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10371 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10372 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10373 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10374 ANDROID_LENS_FACING,
10375 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10376 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10377 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10378 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10379 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10380 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10381 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10382 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10383 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10384 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10385 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10386 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10387 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10388 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10389 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10390 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10391 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10392 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10393 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10394 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010395 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010396 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10397 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10398 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10399 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10400 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10401 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10402 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10403 ANDROID_CONTROL_AVAILABLE_MODES,
10404 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10405 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10406 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10407 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010408 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10409#ifndef USE_HAL_3_3
10410 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10411 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10412#endif
10413 };
10414
10415 Vector<int32_t> available_characteristics_keys;
10416 available_characteristics_keys.appendArray(characteristics_keys_basic,
10417 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10418#ifndef USE_HAL_3_3
10419 if (hasBlackRegions) {
10420 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10421 }
10422#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010423
10424 if (0 <= indexPD) {
10425 int32_t depthKeys[] = {
10426 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10427 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10428 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10429 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10430 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10431 };
10432 available_characteristics_keys.appendArray(depthKeys,
10433 sizeof(depthKeys) / sizeof(depthKeys[0]));
10434 }
10435
Thierry Strudel3d639192016-09-09 11:52:26 -070010436 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010437 available_characteristics_keys.array(),
10438 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010439
10440 /*available stall durations depend on the hw + sw and will be different for different devices */
10441 /*have to add for raw after implementation*/
10442 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10443 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10444
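    // Note on the layout built below: the stall duration table is flattened as
    // {format, width, height, duration} quadruples, one per supported size; e.g. a
    // 12 MP JPEG entry might read {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, <jpeg stall ns>}
    // (sizes here are only illustrative; the real values come from gCamCapability).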
10445 Vector<int64_t> available_stall_durations;
10446 for (uint32_t j = 0; j < stall_formats_count; j++) {
10447 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10448 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10449 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10450 available_stall_durations.add(stall_formats[j]);
10451 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10452 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10453 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10454 }
10455 } else {
10456 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10457 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10458 available_stall_durations.add(stall_formats[j]);
10459 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10460 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10461 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10462 }
10463 }
10464 }
10465 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10466 available_stall_durations.array(),
10467 available_stall_durations.size());
10468
10469 //QCAMERA3_OPAQUE_RAW
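    // The sensor's white level implies the opaque RAW bit depth chosen below:
    // 255 -> 8bpp, 1023 -> 10bpp, 4095 -> 12bpp Bayer formats.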
10470 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10471 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10472 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10473 case LEGACY_RAW:
10474 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10475 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10476 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10477 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10478 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10479 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10480 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10481 break;
10482 case MIPI_RAW:
10483 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10484 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10485 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10486 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10487 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10488 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10489 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10490 break;
10491 default:
10492 LOGE("unknown opaque_raw_format %d",
10493 gCamCapability[cameraId]->opaque_raw_fmt);
10494 break;
10495 }
10496 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10497
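    // QCAMERA3_OPAQUE_RAW_STRIDES is published as {width, height, stride} triplets,
    // one per supported raw dimension, with the stride taken from the first plane.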
10498 Vector<int32_t> strides;
10499 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10500 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10501 cam_stream_buf_plane_info_t buf_planes;
10502 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10503 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10504 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10505 &gCamCapability[cameraId]->padding_info, &buf_planes);
10506 strides.add(buf_planes.plane_info.mp[0].stride);
10507 }
10508 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10509 strides.size());
10510
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010511 //TBD: remove the following line once backend advertises zzHDR in feature mask
10512 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010513 //Video HDR default
10514 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10515 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010516 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010517 int32_t vhdr_mode[] = {
10518 QCAMERA3_VIDEO_HDR_MODE_OFF,
10519 QCAMERA3_VIDEO_HDR_MODE_ON};
10520
10521 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10522 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10523 vhdr_mode, vhdr_mode_count);
10524 }
10525
Thierry Strudel3d639192016-09-09 11:52:26 -070010526 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10527 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10528 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10529
10530 uint8_t isMonoOnly =
10531 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10532 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10533 &isMonoOnly, 1);
10534
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010535#ifndef USE_HAL_3_3
10536 Vector<int32_t> opaque_size;
10537 for (size_t j = 0; j < scalar_formats_count; j++) {
10538 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10539 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10540 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10541 cam_stream_buf_plane_info_t buf_planes;
10542
10543 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10544 &gCamCapability[cameraId]->padding_info, &buf_planes);
10545
10546 if (rc == 0) {
10547 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10548 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10549 opaque_size.add(buf_planes.plane_info.frame_len);
10550 } else {
10551 LOGE("raw frame calculation failed!");
10552 }
10553 }
10554 }
10555 }
10556
10557 if ((opaque_size.size() > 0) &&
10558 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10559 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10560 else
10561 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10562#endif
10563
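    // The capability blocks below all follow the same pattern: walk the backend
    // capability list, translate each HAL enum to its framework value via
    // lookupFwkName(), skip entries with no mapping, and publish the survivors
    // as a vendor tag.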
Thierry Strudel04e026f2016-10-10 11:27:36 -070010564 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10565 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10566 size = 0;
10567 count = CAM_IR_MODE_MAX;
10568 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10569 for (size_t i = 0; i < count; i++) {
10570 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10571 gCamCapability[cameraId]->supported_ir_modes[i]);
10572 if (NAME_NOT_FOUND != val) {
10573 avail_ir_modes[size] = (int32_t)val;
10574 size++;
10575 }
10576 }
10577 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10578 avail_ir_modes, size);
10579 }
10580
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010581 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10582 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10583 size = 0;
10584 count = CAM_AEC_CONVERGENCE_MAX;
10585 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10586 for (size_t i = 0; i < count; i++) {
10587 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10588 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10589 if (NAME_NOT_FOUND != val) {
10590 available_instant_aec_modes[size] = (int32_t)val;
10591 size++;
10592 }
10593 }
10594 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10595 available_instant_aec_modes, size);
10596 }
10597
Thierry Strudel54dc9782017-02-15 12:12:10 -080010598 int32_t sharpness_range[] = {
10599 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10600 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10601 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10602
10603 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10604 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10605 size = 0;
10606 count = CAM_BINNING_CORRECTION_MODE_MAX;
10607 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10608 for (size_t i = 0; i < count; i++) {
10609 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10610 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10611 gCamCapability[cameraId]->supported_binning_modes[i]);
10612 if (NAME_NOT_FOUND != val) {
10613 avail_binning_modes[size] = (int32_t)val;
10614 size++;
10615 }
10616 }
10617 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10618 avail_binning_modes, size);
10619 }
10620
10621 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10622 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10623 size = 0;
10624 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10625 for (size_t i = 0; i < count; i++) {
10626 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10627 gCamCapability[cameraId]->supported_aec_modes[i]);
10628 if (NAME_NOT_FOUND != val)
10629 available_aec_modes[size++] = val;
10630 }
10631 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10632 available_aec_modes, size);
10633 }
10634
10635 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10636 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10637 size = 0;
10638 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10639 for (size_t i = 0; i < count; i++) {
10640 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10641 gCamCapability[cameraId]->supported_iso_modes[i]);
10642 if (NAME_NOT_FOUND != val)
10643 available_iso_modes[size++] = val;
10644 }
10645 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10646 available_iso_modes, size);
10647 }
10648
10649 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010650 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010651 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10652 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10653 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10654
10655 int32_t available_saturation_range[4];
10656 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10657 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10658 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10659 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10660 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10661 available_saturation_range, 4);
10662
10663 uint8_t is_hdr_values[2];
10664 is_hdr_values[0] = 0;
10665 is_hdr_values[1] = 1;
10666 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10667 is_hdr_values, 2);
10668
10669 float is_hdr_confidence_range[2];
10670 is_hdr_confidence_range[0] = 0.0;
10671 is_hdr_confidence_range[1] = 1.0;
10672 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10673 is_hdr_confidence_range, 2);
10674
Emilian Peev0a972ef2017-03-16 10:25:53 +000010675 size_t eepromLength = strnlen(
10676 reinterpret_cast<const char *>(
10677 gCamCapability[cameraId]->eeprom_version_info),
10678 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10679 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010680 char easelInfo[] = ",E:N";
10681 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10682 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10683 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010684 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
10685 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010686 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010687 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010688 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10689 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10690 }
10691
Thierry Strudel3d639192016-09-09 11:52:26 -070010692 gStaticMetadata[cameraId] = staticInfo.release();
10693 return rc;
10694}
10695
10696/*===========================================================================
10697 * FUNCTION : makeTable
10698 *
10699 * DESCRIPTION: make a table of sizes
10700 *
10701 * PARAMETERS :
10702 *   @dimTable, @size : source dimension table and its valid entry count
10703 *   @max_size, @sizeTable : copy limit and output array of flattened {width, height} pairs
10704 *==========================================================================*/
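// Illustrative example: with dimTable = {{1920,1080}, {1280,720}} and size = 2,
// sizeTable is filled as {1920, 1080, 1280, 720}, i.e. width/height pairs
// flattened in declaration order (values here are examples only).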
10705void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10706 size_t max_size, int32_t *sizeTable)
10707{
10708 size_t j = 0;
10709 if (size > max_size) {
10710 size = max_size;
10711 }
10712 for (size_t i = 0; i < size; i++) {
10713 sizeTable[j] = dimTable[i].width;
10714 sizeTable[j+1] = dimTable[i].height;
10715 j+=2;
10716 }
10717}
10718
10719/*===========================================================================
10720 * FUNCTION : makeFPSTable
10721 *
10722 * DESCRIPTION: make a table of fps ranges
10723 *
10724 * PARAMETERS :
10725 *   @fpsTable, @size, @max_size, @fpsRangesTable : source fps range table, its entry count, copy limit and output array of {min_fps, max_fps} pairs
10726 *==========================================================================*/
10727void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10728 size_t max_size, int32_t *fpsRangesTable)
10729{
10730 size_t j = 0;
10731 if (size > max_size) {
10732 size = max_size;
10733 }
10734 for (size_t i = 0; i < size; i++) {
10735 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10736 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10737 j+=2;
10738 }
10739}
10740
10741/*===========================================================================
10742 * FUNCTION : makeOverridesList
10743 *
10744 * DESCRIPTION: make a list of scene mode overrides
10745 *
10746 * PARAMETERS :
10747 *   @overridesTable, @size, @max_size : backend scene mode override table, its entry count and copy limit
10748 *   @overridesList, @supported_indexes, @camera_id : output list, framework-supported scene mode indexes, camera id
10749 *==========================================================================*/
10750void QCamera3HardwareInterface::makeOverridesList(
10751 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10752 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10753{
10754 /*daemon will give a list of overrides for all scene modes.
10755 However we should send the fwk only the overrides for the scene modes
10756 supported by the framework*/
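    // Each output entry is an {ae_mode, awb_mode, af_mode} triplet, written at
    // consecutive offsets j, j+1, j+2 for every framework-supported scene mode.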
10757 size_t j = 0;
10758 if (size > max_size) {
10759 size = max_size;
10760 }
10761 size_t focus_count = CAM_FOCUS_MODE_MAX;
10762 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10763 focus_count);
10764 for (size_t i = 0; i < size; i++) {
10765 bool supt = false;
10766 size_t index = supported_indexes[i];
10767 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10768 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10769 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10770 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10771 overridesTable[index].awb_mode);
10772 if (NAME_NOT_FOUND != val) {
10773 overridesList[j+1] = (uint8_t)val;
10774 }
10775 uint8_t focus_override = overridesTable[index].af_mode;
10776 for (size_t k = 0; k < focus_count; k++) {
10777 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10778 supt = true;
10779 break;
10780 }
10781 }
10782 if (supt) {
10783 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10784 focus_override);
10785 if (NAME_NOT_FOUND != val) {
10786 overridesList[j+2] = (uint8_t)val;
10787 }
10788 } else {
10789 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10790 }
10791 j+=3;
10792 }
10793}
10794
10795/*===========================================================================
10796 * FUNCTION : filterJpegSizes
10797 *
10798 * DESCRIPTION: Returns the supported JPEG sizes, keeping only processed sizes
10799 * at least as large as the active array size divided by the downscale factor
10800 *
10801 * PARAMETERS :
10802 *   @jpegSizes (out), @processedSizes/@processedSizesCnt (in), @maxCount, @active_array_size, @downscale_factor : filtering bounds
10803 * RETURN : length of jpegSizes array
10804 *==========================================================================*/
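// Illustrative example: with a 4000x3000 active array and downscale_factor = 2,
// only processed sizes of at least 2000x1500 are kept as JPEG sizes, so
// {4000,3000, 1920,1080} would be filtered down to {4000,3000} (example values).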
10805
10806size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10807 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10808 uint8_t downscale_factor)
10809{
10810 if (0 == downscale_factor) {
10811 downscale_factor = 1;
10812 }
10813
10814 int32_t min_width = active_array_size.width / downscale_factor;
10815 int32_t min_height = active_array_size.height / downscale_factor;
10816 size_t jpegSizesCnt = 0;
10817 if (processedSizesCnt > maxCount) {
10818 processedSizesCnt = maxCount;
10819 }
10820 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10821 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10822 jpegSizes[jpegSizesCnt] = processedSizes[i];
10823 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10824 jpegSizesCnt += 2;
10825 }
10826 }
10827 return jpegSizesCnt;
10828}
10829
10830/*===========================================================================
10831 * FUNCTION : computeNoiseModelEntryS
10832 *
10833 * DESCRIPTION: function to map a given sensitivity to the S noise
10834 * model parameters in the DNG noise model.
10835 *
10836 * PARAMETERS : sens : the sensor sensitivity
10837 *
10838 * RETURN : S (sensor amplification) noise
10839 *
10840 *==========================================================================*/
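// computeNoiseModelEntryS/O below parameterize the DNG NoiseProfile model used by
// ANDROID_SENSOR_NOISE_PROFILE, where the noise variance at a normalized signal
// level x is approximated as S * x + O: S grows linearly with sensitivity and O
// quadratically, with digital gain folded in beyond the maximum analog sensitivity.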
10841double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10842 double s = gCamCapability[mCameraId]->gradient_S * sens +
10843 gCamCapability[mCameraId]->offset_S;
10844 return ((s < 0.0) ? 0.0 : s);
10845}
10846
10847/*===========================================================================
10848 * FUNCTION : computeNoiseModelEntryO
10849 *
10850 * DESCRIPTION: function to map a given sensitivity to the O noise
10851 * model parameters in the DNG noise model.
10852 *
10853 * PARAMETERS : sens : the sensor sensitivity
10854 *
10855 * RETURN : O (sensor readout) noise
10856 *
10857 *==========================================================================*/
10858double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10859 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10860 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10861 1.0 : (1.0 * sens / max_analog_sens);
10862 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10863 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10864 return ((o < 0.0) ? 0.0 : o);
10865}
10866
10867/*===========================================================================
10868 * FUNCTION : getSensorSensitivity
10869 *
10870 * DESCRIPTION: convert iso_mode to an integer value
10871 *
10872 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10873 *
10874 * RETURN : sensitivity supported by sensor
10875 *
10876 *==========================================================================*/
10877int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10878{
10879 int32_t sensitivity;
10880
10881 switch (iso_mode) {
10882 case CAM_ISO_MODE_100:
10883 sensitivity = 100;
10884 break;
10885 case CAM_ISO_MODE_200:
10886 sensitivity = 200;
10887 break;
10888 case CAM_ISO_MODE_400:
10889 sensitivity = 400;
10890 break;
10891 case CAM_ISO_MODE_800:
10892 sensitivity = 800;
10893 break;
10894 case CAM_ISO_MODE_1600:
10895 sensitivity = 1600;
10896 break;
10897 default:
10898 sensitivity = -1;
10899 break;
10900 }
10901 return sensitivity;
10902}
10903
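// Sketch of the flow below: lazily create the Easel manager client; if Easel is
// present (and not excluded via camera.hdrplus.donotpoweroneasel), open it and
// immediately suspend it to save power, then derive the HDR+ related globals
// (gEaselBypassOnly, gEaselProfilingEnabled, gExposeEnableZslKey) from properties.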
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010904int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010905 if (gEaselManagerClient == nullptr) {
10906 gEaselManagerClient = EaselManagerClient::create();
10907 if (gEaselManagerClient == nullptr) {
10908 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10909 return -ENODEV;
10910 }
10911 }
10912
10913 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010914 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10915 // to connect to Easel.
10916 bool doNotpowerOnEasel =
10917 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10918
10919 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010920 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10921 return OK;
10922 }
10923
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010924 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010925 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010926 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010927 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010928 return res;
10929 }
10930
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010931 EaselManagerClientOpened = true;
10932
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010933 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010934 if (res != OK) {
10935 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10936 }
10937
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010938 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010939 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010940
10941 // Expose enableZsl key only when HDR+ mode is enabled.
10942 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010943 }
10944
10945 return OK;
10946}
10947
Thierry Strudel3d639192016-09-09 11:52:26 -070010948/*===========================================================================
10949 * FUNCTION : getCamInfo
10950 *
10951 * DESCRIPTION: query camera capabilities
10952 *
10953 * PARAMETERS :
10954 * @cameraId : camera Id
10955 * @info : camera info struct to be filled in with camera capabilities
10956 *
10957 * RETURN : int type of status
10958 * NO_ERROR -- success
10959 * none-zero failure code
10960 *==========================================================================*/
10961int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10962 struct camera_info *info)
10963{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010964 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010965 int rc = 0;
10966
10967 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010968
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010969 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070010970 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010971 rc = initHdrPlusClientLocked();
10972 if (rc != OK) {
10973 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10974 pthread_mutex_unlock(&gCamLock);
10975 return rc;
10976 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010977 }
10978
Thierry Strudel3d639192016-09-09 11:52:26 -070010979 if (NULL == gCamCapability[cameraId]) {
10980 rc = initCapabilities(cameraId);
10981 if (rc < 0) {
10982 pthread_mutex_unlock(&gCamLock);
10983 return rc;
10984 }
10985 }
10986
10987 if (NULL == gStaticMetadata[cameraId]) {
10988 rc = initStaticMetadata(cameraId);
10989 if (rc < 0) {
10990 pthread_mutex_unlock(&gCamLock);
10991 return rc;
10992 }
10993 }
10994
10995 switch(gCamCapability[cameraId]->position) {
10996 case CAM_POSITION_BACK:
10997 case CAM_POSITION_BACK_AUX:
10998 info->facing = CAMERA_FACING_BACK;
10999 break;
11000
11001 case CAM_POSITION_FRONT:
11002 case CAM_POSITION_FRONT_AUX:
11003 info->facing = CAMERA_FACING_FRONT;
11004 break;
11005
11006 default:
11007 LOGE("Unknown position type %d for camera id:%d",
11008 gCamCapability[cameraId]->position, cameraId);
11009 rc = -1;
11010 break;
11011 }
11012
11013
11014 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011015#ifndef USE_HAL_3_3
11016 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11017#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011018 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011019#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011020 info->static_camera_characteristics = gStaticMetadata[cameraId];
11021
11022 //For now assume both cameras can operate independently.
11023 info->conflicting_devices = NULL;
11024 info->conflicting_devices_length = 0;
11025
11026 //resource cost is 100 * MIN(1.0, m/M),
11027 //where m is throughput requirement with maximum stream configuration
11028 //and M is CPP maximum throughput.
11029 float max_fps = 0.0;
11030 for (uint32_t i = 0;
11031 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11032 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11033 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11034 }
11035 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11036 gCamCapability[cameraId]->active_array_size.width *
11037 gCamCapability[cameraId]->active_array_size.height * max_fps /
11038 gCamCapability[cameraId]->max_pixel_bandwidth;
11039 info->resource_cost = 100 * MIN(1.0, ratio);
11040 LOGI("camera %d resource cost is %d", cameraId,
11041 info->resource_cost);
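    // Worked example with made-up numbers: a 4032x3024 active array at a peak of
    // 30 fps with MAX_PROCESSED_STREAMS = 2 gives m ~= 2 * 4032 * 3024 * 30
    // ~= 7.3e8 pixels/s; if the CPP bandwidth M were 8.0e8 pixels/s, the reported
    // cost would be 100 * MIN(1.0, 0.91) ~= 91.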
11042
11043 pthread_mutex_unlock(&gCamLock);
11044 return rc;
11045}
11046
11047/*===========================================================================
11048 * FUNCTION : translateCapabilityToMetadata
11049 *
11050 * DESCRIPTION: translate the capability into camera_metadata_t
11051 *
11052 * PARAMETERS : type of the request
11053 *
11054 *
11055 * RETURN : success: camera_metadata_t*
11056 * failure: NULL
11057 *
11058 *==========================================================================*/
11059camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11060{
11061 if (mDefaultMetadata[type] != NULL) {
11062 return mDefaultMetadata[type];
11063 }
11064 //first time we are handling this request
11065 //fill up the metadata structure using the wrapper class
11066 CameraMetadata settings;
11067 //translate from cam_capability_t to camera_metadata_tag_t
11068 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11069 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11070 int32_t defaultRequestID = 0;
11071 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11072
11073 /* OIS disable */
11074 char ois_prop[PROPERTY_VALUE_MAX];
11075 memset(ois_prop, 0, sizeof(ois_prop));
11076 property_get("persist.camera.ois.disable", ois_prop, "0");
11077 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11078
11079 /* Force video to use OIS */
11080 char videoOisProp[PROPERTY_VALUE_MAX];
11081 memset(videoOisProp, 0, sizeof(videoOisProp));
11082 property_get("persist.camera.ois.video", videoOisProp, "1");
11083 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011084
11085 // Hybrid AE enable/disable
11086 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11087 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11088 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
11089 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
11090
Thierry Strudel3d639192016-09-09 11:52:26 -070011091 uint8_t controlIntent = 0;
11092 uint8_t focusMode;
11093 uint8_t vsMode;
11094 uint8_t optStabMode;
11095 uint8_t cacMode;
11096 uint8_t edge_mode;
11097 uint8_t noise_red_mode;
11098 uint8_t tonemap_mode;
11099 bool highQualityModeEntryAvailable = FALSE;
11100 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011101 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011102 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11103 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011104 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011105 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011106 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011107
Thierry Strudel3d639192016-09-09 11:52:26 -070011108 switch (type) {
11109 case CAMERA3_TEMPLATE_PREVIEW:
11110 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11111 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11112 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11113 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11114 edge_mode = ANDROID_EDGE_MODE_FAST;
11115 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11116 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11117 break;
11118 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11119 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11120 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11121 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11122 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11123 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11124 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11125 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11126 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11127 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11128 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11129 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11130 highQualityModeEntryAvailable = TRUE;
11131 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11132 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11133 fastModeEntryAvailable = TRUE;
11134 }
11135 }
11136 if (highQualityModeEntryAvailable) {
11137 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11138 } else if (fastModeEntryAvailable) {
11139 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11140 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011141 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11142 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11143 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011144 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011145 break;
11146 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11147 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11148 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11149 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011150 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11151 edge_mode = ANDROID_EDGE_MODE_FAST;
11152 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11153 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11154 if (forceVideoOis)
11155 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11156 break;
11157 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11158 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11159 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11160 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011161 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11162 edge_mode = ANDROID_EDGE_MODE_FAST;
11163 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11164 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11165 if (forceVideoOis)
11166 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11167 break;
11168 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11169 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11170 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11171 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11172 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11173 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11174 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11175 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11176 break;
11177 case CAMERA3_TEMPLATE_MANUAL:
11178 edge_mode = ANDROID_EDGE_MODE_FAST;
11179 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11180 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11181 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11182 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11183 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11184 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11185 break;
11186 default:
11187 edge_mode = ANDROID_EDGE_MODE_FAST;
11188 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11189 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11190 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11191 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11192 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11193 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11194 break;
11195 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011196 // Set CAC to OFF if the underlying device doesn't support it
11197 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11198 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11199 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011200 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11201 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11202 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11203 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11204 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11205 }
11206 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011207 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011208 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011209
11210 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11211 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11212 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11213 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11214 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11215 || ois_disable)
11216 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11217 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011218 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011219
11220 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11221 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11222
11223 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11224 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11225
11226 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11227 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11228
11229 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11230 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11231
11232 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11233 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11234
11235 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11236 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11237
11238 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11239 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11240
11241 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11242 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11243
11244 /*flash*/
11245 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11246 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11247
11248 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11249 settings.update(ANDROID_FLASH_FIRING_POWER,
11250 &flashFiringLevel, 1);
11251
11252 /* lens */
11253 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11254 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11255
11256 if (gCamCapability[mCameraId]->filter_densities_count) {
11257 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11258 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11259 gCamCapability[mCameraId]->filter_densities_count);
11260 }
11261
11262 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11263 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11264
Thierry Strudel3d639192016-09-09 11:52:26 -070011265 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11266 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11267
11268 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11269 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11270
11271 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11272 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11273
11274 /* face detection (default to OFF) */
11275 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11276 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11277
Thierry Strudel54dc9782017-02-15 12:12:10 -080011278 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11279 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011280
11281 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11282 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11283
11284 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11285 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11286
Thierry Strudel3d639192016-09-09 11:52:26 -070011287
11288 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11289 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11290
11291 /* Exposure time (default to the minimum supported exposure time) */
11292 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11293 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11294
11295 /* frame duration */
11296 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11297 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11298
11299 /* sensitivity */
11300 static const int32_t default_sensitivity = 100;
11301 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011302#ifndef USE_HAL_3_3
11303 static const int32_t default_isp_sensitivity =
11304 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11305 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11306#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011307
11308 /*edge mode*/
11309 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11310
11311 /*noise reduction mode*/
11312 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11313
11314 /*color correction mode*/
11315 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11316 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11317
11318 /*transform matrix mode*/
11319 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11320
11321 int32_t scaler_crop_region[4];
11322 scaler_crop_region[0] = 0;
11323 scaler_crop_region[1] = 0;
11324 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11325 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11326 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11327
11328 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11329 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11330
11331 /*focus distance*/
11332 float focus_distance = 0.0;
11333 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11334
11335 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011336 /* Restrict template max_fps to 30 */
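    /* Illustrative example: given fps ranges {15-30, 30-30, 7.5-30}, the preview,
     * still and ZSL templates pick the widest range not exceeding 30 fps (7.5-30),
     * while the video templates pick the highest fixed range (30-30).
     * Values are examples only. */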
Thierry Strudel3d639192016-09-09 11:52:26 -070011337 float max_range = 0.0;
11338 float max_fixed_fps = 0.0;
11339 int32_t fps_range[2] = {0, 0};
11340 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11341 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011342 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11343 TEMPLATE_MAX_PREVIEW_FPS) {
11344 continue;
11345 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011346 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11347 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11348 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11349 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11350 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11351 if (range > max_range) {
11352 fps_range[0] =
11353 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11354 fps_range[1] =
11355 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11356 max_range = range;
11357 }
11358 } else {
11359 if (range < 0.01 && max_fixed_fps <
11360 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11361 fps_range[0] =
11362 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11363 fps_range[1] =
11364 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11365 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11366 }
11367 }
11368 }
11369 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11370
11371 /*precapture trigger*/
11372 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11373 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11374
11375 /*af trigger*/
11376 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11377 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11378
11379 /* ae & af regions */
11380 int32_t active_region[] = {
11381 gCamCapability[mCameraId]->active_array_size.left,
11382 gCamCapability[mCameraId]->active_array_size.top,
11383 gCamCapability[mCameraId]->active_array_size.left +
11384 gCamCapability[mCameraId]->active_array_size.width,
11385 gCamCapability[mCameraId]->active_array_size.top +
11386 gCamCapability[mCameraId]->active_array_size.height,
11387 0};
11388 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11389 sizeof(active_region) / sizeof(active_region[0]));
11390 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11391 sizeof(active_region) / sizeof(active_region[0]));
11392
11393 /* black level lock */
11394 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11395 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11396
Thierry Strudel3d639192016-09-09 11:52:26 -070011397 //special defaults for manual template
11398 if (type == CAMERA3_TEMPLATE_MANUAL) {
11399 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11400 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11401
11402 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11403 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11404
11405 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11406 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11407
11408 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11409 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11410
11411 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11412 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11413
11414 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11415 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11416 }
11417
11418
11419 /* TNR
11420 * This is where we decide for which templates TNR is enabled by default.
11421 * TNR is enabled if either the preview or the video stream requires it.
11422 * This is not to be confused with per-stream linking; that decision is still
11423 * made per session and is handled as part of stream configuration.
11424 */
11425 uint8_t tnr_enable = 0;
11426
11427 if (m_bTnrPreview || m_bTnrVideo) {
11428
11429 switch (type) {
11430 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11431 tnr_enable = 1;
11432 break;
11433
11434 default:
11435 tnr_enable = 0;
11436 break;
11437 }
11438
11439 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11440 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11441 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11442
11443 LOGD("TNR:%d with process plate %d for template:%d",
11444 tnr_enable, tnr_process_type, type);
11445 }
11446
11447 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011448 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011449 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11450
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011451 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011452 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11453
Shuzhen Wang920ea402017-05-03 08:49:39 -070011454 uint8_t related_camera_id = mCameraId;
11455 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011456
11457 /* CDS default */
11458 char prop[PROPERTY_VALUE_MAX];
11459 memset(prop, 0, sizeof(prop));
11460 property_get("persist.camera.CDS", prop, "Auto");
11461 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11462 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11463 if (CAM_CDS_MODE_MAX == cds_mode) {
11464 cds_mode = CAM_CDS_MODE_AUTO;
11465 }
11466
11467 /* Disabling CDS in templates which have TNR enabled*/
11468 if (tnr_enable)
11469 cds_mode = CAM_CDS_MODE_OFF;
11470
11471 int32_t mode = cds_mode;
11472 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011473
Thierry Strudel269c81a2016-10-12 12:13:59 -070011474 /* Manual Convergence AEC Speed is disabled by default*/
11475 float default_aec_speed = 0;
11476 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11477
11478 /* Manual Convergence AWB Speed is disabled by default*/
11479 float default_awb_speed = 0;
11480 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11481
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011482 // Set instant AEC to normal convergence by default
11483 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11484 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11485
Shuzhen Wang19463d72016-03-08 11:09:52 -080011486 /* hybrid ae */
11487 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11488
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011489 if (gExposeEnableZslKey) {
11490 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11491 }
11492
Thierry Strudel3d639192016-09-09 11:52:26 -070011493 mDefaultMetadata[type] = settings.release();
11494
11495 return mDefaultMetadata[type];
11496}
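/*
 * Illustrative sketch (not compiled into the HAL): a caller could sanity-check the
 * manual template returned above using the public metadata accessors. The helper
 * name isManualTemplate() is hypothetical; find_camera_metadata_ro_entry() and the
 * ANDROID_* tags are the real APIs already used in this file.
 *
 *   static bool isManualTemplate(const camera_metadata_t *tmpl) {
 *       camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
 *       if (find_camera_metadata_ro_entry(tmpl, ANDROID_CONTROL_MODE, &e) != 0 ||
 *               e.count == 0) {
 *           return false;
 *       }
 *       return e.data.u8[0] == ANDROID_CONTROL_MODE_OFF;
 *   }
 */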
11497
11498/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011499 * FUNCTION : getExpectedFrameDuration
11500 *
11501 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11502 * duration
11503 *
11504 * PARAMETERS :
11505 * @request : request settings
11506 * @frameDuration : The maximum frame duration in nanoseconds
11507 *
11508 * RETURN : None
11509 *==========================================================================*/
11510void QCamera3HardwareInterface::getExpectedFrameDuration(
11511 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11512 if (nullptr == frameDuration) {
11513 return;
11514 }
11515
11516 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11517 find_camera_metadata_ro_entry(request,
11518 ANDROID_SENSOR_EXPOSURE_TIME,
11519 &e);
11520 if (e.count > 0) {
11521 *frameDuration = e.data.i64[0];
11522 }
11523 find_camera_metadata_ro_entry(request,
11524 ANDROID_SENSOR_FRAME_DURATION,
11525 &e);
11526 if (e.count > 0) {
11527 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11528 }
11529}
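/*
 * Worked example (sketch only): with ANDROID_SENSOR_EXPOSURE_TIME = 50000000 ns and
 * ANDROID_SENSOR_FRAME_DURATION = 33333333 ns in the request, the function above
 * reports 50000000 ns, since a frame can never be shorter than its exposure:
 *
 *   nsecs_t expected = kDefaultExpectedDuration;
 *   getExpectedFrameDuration(request, &expected);   // out param; expected == 50000000
 */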
11530
11531/*===========================================================================
11532 * FUNCTION : calculateMaxExpectedDuration
11533 *
11534 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11535 * current camera settings.
11536 *
11537 * PARAMETERS :
11538 * @request : request settings
11539 *
11540 * RETURN : Expected frame duration in nanoseconds.
11541 *==========================================================================*/
11542nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11543 const camera_metadata_t *request) {
11544 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11545 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11546 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11547 if (e.count == 0) {
11548 return maxExpectedDuration;
11549 }
11550
11551 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11552 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11553 }
11554
11555 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11556 return maxExpectedDuration;
11557 }
11558
11559 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11560 if (e.count == 0) {
11561 return maxExpectedDuration;
11562 }
11563
11564 switch (e.data.u8[0]) {
11565 case ANDROID_CONTROL_AE_MODE_OFF:
11566 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11567 break;
11568 default:
11569 find_camera_metadata_ro_entry(request,
11570 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11571 &e);
11572 if (e.count > 1) {
11573 maxExpectedDuration = 1e9 / e.data.i32[0]; // AE target fps range is int32; min fps bounds the duration
11574 }
11575 break;
11576 }
11577
11578 return maxExpectedDuration;
11579}
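/*
 * Sketch of the auto-exposure branch above (illustrative only): with
 * ANDROID_CONTROL_AE_TARGET_FPS_RANGE = [15, 30], the upper bound on frame duration
 * follows from the minimum fps of the range:
 *
 *   int32_t minFps = 15;                            // e.data.i32[0]
 *   nsecs_t maxDuration = (nsecs_t)(1e9 / minFps);  // ~66666666 ns
 */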
11580
11581/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011582 * FUNCTION : setFrameParameters
11583 *
11584 * DESCRIPTION: set parameters per frame as requested in the metadata from
11585 * framework
11586 *
11587 * PARAMETERS :
11588 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011589 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011590 * @blob_request: Whether this request is a blob request or not
11591 *
11592 * RETURN : success: NO_ERROR
11593 * failure:
11594 *==========================================================================*/
11595int QCamera3HardwareInterface::setFrameParameters(
11596 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011597 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011598 int blob_request,
11599 uint32_t snapshotStreamId)
11600{
11601 /*translate from camera_metadata_t type to parm_type_t*/
11602 int rc = 0;
11603 int32_t hal_version = CAM_HAL_V3;
11604
11605 clear_metadata_buffer(mParameters);
11606 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11607 LOGE("Failed to set hal version in the parameters");
11608 return BAD_VALUE;
11609 }
11610
11611 /*we need to update the frame number in the parameters*/
11612 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11613 request->frame_number)) {
11614 LOGE("Failed to set the frame number in the parameters");
11615 return BAD_VALUE;
11616 }
11617
11618 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011620 LOGE("Failed to set stream ID in the parameters");
11621 return BAD_VALUE;
11622 }
11623
11624 if (mUpdateDebugLevel) {
11625 uint32_t dummyDebugLevel = 0;
11626 /* The value of dummyDebugLevel is irrelevant. On
11627 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11628 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11629 dummyDebugLevel)) {
11630 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11631 return BAD_VALUE;
11632 }
11633 mUpdateDebugLevel = false;
11634 }
11635
11636 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011637 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011638 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11639 if (blob_request)
11640 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11641 }
11642
11643 return rc;
11644}
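/*
 * Usage sketch (illustrative, not part of the build): the caller fills in the list
 * of streams that expect a buffer for this frame and then hands the request over.
 * The cam_stream_ID_t field names below follow their usage elsewhere in this HAL,
 * and the channel object is hypothetical here.
 *
 *   cam_stream_ID_t streamsArray;
 *   memset(&streamsArray, 0, sizeof(streamsArray));
 *   streamsArray.stream_request[streamsArray.num_streams++].streamID =
 *           channel->getStreamID(channel->getStreamTypeMask());
 *   rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
 */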
11645
11646/*===========================================================================
11647 * FUNCTION : setReprocParameters
11648 *
11649 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11650 * return it.
11651 *
11652 * PARAMETERS :
11653 * @request : request that needs to be serviced
11654 *
11655 * RETURN : success: NO_ERROR
11656 * failure:
11657 *==========================================================================*/
11658int32_t QCamera3HardwareInterface::setReprocParameters(
11659 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11660 uint32_t snapshotStreamId)
11661{
11662 /*translate from camera_metadata_t type to parm_type_t*/
11663 int rc = 0;
11664
11665 if (NULL == request->settings){
11666 LOGE("Reprocess settings cannot be NULL");
11667 return BAD_VALUE;
11668 }
11669
11670 if (NULL == reprocParam) {
11671 LOGE("Invalid reprocessing metadata buffer");
11672 return BAD_VALUE;
11673 }
11674 clear_metadata_buffer(reprocParam);
11675
11676 /*we need to update the frame number in the parameters*/
11677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11678 request->frame_number)) {
11679 LOGE("Failed to set the frame number in the parameters");
11680 return BAD_VALUE;
11681 }
11682
11683 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11684 if (rc < 0) {
11685 LOGE("Failed to translate reproc request");
11686 return rc;
11687 }
11688
11689 CameraMetadata frame_settings;
11690 frame_settings = request->settings;
11691 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11692 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11693 int32_t *crop_count =
11694 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11695 int32_t *crop_data =
11696 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11697 int32_t *roi_map =
11698 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11699 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11700 cam_crop_data_t crop_meta;
11701 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11702 crop_meta.num_of_streams = 1;
11703 crop_meta.crop_info[0].crop.left = crop_data[0];
11704 crop_meta.crop_info[0].crop.top = crop_data[1];
11705 crop_meta.crop_info[0].crop.width = crop_data[2];
11706 crop_meta.crop_info[0].crop.height = crop_data[3];
11707
11708 crop_meta.crop_info[0].roi_map.left =
11709 roi_map[0];
11710 crop_meta.crop_info[0].roi_map.top =
11711 roi_map[1];
11712 crop_meta.crop_info[0].roi_map.width =
11713 roi_map[2];
11714 crop_meta.crop_info[0].roi_map.height =
11715 roi_map[3];
11716
11717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11718 rc = BAD_VALUE;
11719 }
11720 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11721 request->input_buffer->stream,
11722 crop_meta.crop_info[0].crop.left,
11723 crop_meta.crop_info[0].crop.top,
11724 crop_meta.crop_info[0].crop.width,
11725 crop_meta.crop_info[0].crop.height);
11726 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11727 request->input_buffer->stream,
11728 crop_meta.crop_info[0].roi_map.left,
11729 crop_meta.crop_info[0].roi_map.top,
11730 crop_meta.crop_info[0].roi_map.width,
11731 crop_meta.crop_info[0].roi_map.height);
11732 } else {
11733 LOGE("Invalid reprocess crop count %d!", *crop_count);
11734 }
11735 } else {
11736 LOGE("No crop data from matching output stream");
11737 }
11738
11739 /* These settings are not needed for regular requests so handle them specially for
11740 reprocess requests; information needed for EXIF tags */
11741 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11742 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11743 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11744 if (NAME_NOT_FOUND != val) {
11745 uint32_t flashMode = (uint32_t)val;
11746 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11747 rc = BAD_VALUE;
11748 }
11749 } else {
11750 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11751 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11752 }
11753 } else {
11754 LOGH("No flash mode in reprocess settings");
11755 }
11756
11757 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11758 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11760 rc = BAD_VALUE;
11761 }
11762 } else {
11763 LOGH("No flash state in reprocess settings");
11764 }
11765
11766 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11767 uint8_t *reprocessFlags =
11768 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11770 *reprocessFlags)) {
11771 rc = BAD_VALUE;
11772 }
11773 }
11774
Thierry Strudel54dc9782017-02-15 12:12:10 -080011775 // Add exif debug data to internal metadata
11776 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11777 mm_jpeg_debug_exif_params_t *debug_params =
11778 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11779 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11780 // AE
11781 if (debug_params->ae_debug_params_valid == TRUE) {
11782 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11783 debug_params->ae_debug_params);
11784 }
11785 // AWB
11786 if (debug_params->awb_debug_params_valid == TRUE) {
11787 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11788 debug_params->awb_debug_params);
11789 }
11790 // AF
11791 if (debug_params->af_debug_params_valid == TRUE) {
11792 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11793 debug_params->af_debug_params);
11794 }
11795 // ASD
11796 if (debug_params->asd_debug_params_valid == TRUE) {
11797 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11798 debug_params->asd_debug_params);
11799 }
11800 // Stats
11801 if (debug_params->stats_debug_params_valid == TRUE) {
11802 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11803 debug_params->stats_debug_params);
11804 }
11805 // BE Stats
11806 if (debug_params->bestats_debug_params_valid == TRUE) {
11807 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11808 debug_params->bestats_debug_params);
11809 }
11810 // BHIST
11811 if (debug_params->bhist_debug_params_valid == TRUE) {
11812 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11813 debug_params->bhist_debug_params);
11814 }
11815 // 3A Tuning
11816 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11817 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11818 debug_params->q3a_tuning_debug_params);
11819 }
11820 }
11821
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011822 // Add metadata which reprocess needs
11823 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11824 cam_reprocess_info_t *repro_info =
11825 (cam_reprocess_info_t *)frame_settings.find
11826 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011827 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011828 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011829 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011830 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011831 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011832 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011833 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011834 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011835 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011836 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011837 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011838 repro_info->pipeline_flip);
11839 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11840 repro_info->af_roi);
11841 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11842 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011843 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11844 CAM_INTF_PARM_ROTATION metadata has already been added in
11845 translateToHalMetadata, and the HAL needs to keep this new rotation
11846 metadata. Otherwise, the old rotation info saved in the vendor tag
11847 is used. */
11848 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11849 CAM_INTF_PARM_ROTATION, reprocParam) {
11850 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11851 } else {
11852 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011853 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011854 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011855 }
11856
11857 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11858 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11859 roi.width and roi.height would be the final JPEG size.
11860 For now, the HAL only checks this for reprocess requests */
11861 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11862 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11863 uint8_t *enable =
11864 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11865 if (*enable == TRUE) {
11866 int32_t *crop_data =
11867 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11868 cam_stream_crop_info_t crop_meta;
11869 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11870 crop_meta.stream_id = 0;
11871 crop_meta.crop.left = crop_data[0];
11872 crop_meta.crop.top = crop_data[1];
11873 crop_meta.crop.width = crop_data[2];
11874 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011875 // The JPEG crop roi should match cpp output size
11876 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11877 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11878 crop_meta.roi_map.left = 0;
11879 crop_meta.roi_map.top = 0;
11880 crop_meta.roi_map.width = cpp_crop->crop.width;
11881 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011882 }
11883 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11884 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011885 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011886 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011887 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11888 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011889 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011890 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11891
11892 // Add JPEG scale information
11893 cam_dimension_t scale_dim;
11894 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11895 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11896 int32_t *roi =
11897 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11898 scale_dim.width = roi[2];
11899 scale_dim.height = roi[3];
11900 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11901 scale_dim);
11902 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11903 scale_dim.width, scale_dim.height, mCameraId);
11904 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011905 }
11906 }
11907
11908 return rc;
11909}
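/*
 * Sketch of the vendor-tag input consumed above (illustrative; the producer lives
 * outside this file). A reprocess client asking for a centered crop plus a downscale
 * to 1920x1440 could populate its settings like this:
 *
 *   uint8_t cropEnable = 1;
 *   int32_t cropRect[4] = {200, 150, 3600, 2700};  // left, top, width, height
 *   int32_t cropRoi[4]  = {0, 0, 1920, 1440};      // roi[2]/roi[3] become the JPEG size
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
 *
 * reprocSettings is assumed to be a CameraMetadata wrapping the reprocess request.
 */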
11910
11911/*===========================================================================
11912 * FUNCTION : saveRequestSettings
11913 *
11914 * DESCRIPTION: Add any settings that might have changed to the request settings
11915 * and save the settings to be applied on the frame
11916 *
11917 * PARAMETERS :
11918 * @jpegMetadata : the extracted and/or modified jpeg metadata
11919 * @request : request with initial settings
11920 *
11921 * RETURN :
11922 * camera_metadata_t* : pointer to the saved request settings
11923 *==========================================================================*/
11924camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11925 const CameraMetadata &jpegMetadata,
11926 camera3_capture_request_t *request)
11927{
11928 camera_metadata_t *resultMetadata;
11929 CameraMetadata camMetadata;
11930 camMetadata = request->settings;
11931
11932 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11933 int32_t thumbnail_size[2];
11934 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11935 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11936 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11937 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11938 }
11939
11940 if (request->input_buffer != NULL) {
11941 uint8_t reprocessFlags = 1;
11942 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11943 (uint8_t*)&reprocessFlags,
11944 sizeof(reprocessFlags));
11945 }
11946
11947 resultMetadata = camMetadata.release();
11948 return resultMetadata;
11949}
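/*
 * Usage note (sketch): release() above detaches the buffer from the CameraMetadata
 * wrapper, so the caller owns the returned camera_metadata_t and must eventually
 * free it (or re-wrap it):
 *
 *   camera_metadata_t *saved = saveRequestSettings(jpegMetadata, request);
 *   // ... keep it with the in-flight request ...
 *   free_camera_metadata(saved);
 */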
11950
11951/*===========================================================================
11952 * FUNCTION : setHalFpsRange
11953 *
11954 * DESCRIPTION: set FPS range parameter
11955 *
11956 *
11957 * PARAMETERS :
11958 * @settings : Metadata from framework
11959 * @hal_metadata: Metadata buffer
11960 *
11961 *
11962 * RETURN : success: NO_ERROR
11963 * failure:
11964 *==========================================================================*/
11965int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11966 metadata_buffer_t *hal_metadata)
11967{
11968 int32_t rc = NO_ERROR;
11969 cam_fps_range_t fps_range;
11970 fps_range.min_fps = (float)
11971 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11972 fps_range.max_fps = (float)
11973 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11974 fps_range.video_min_fps = fps_range.min_fps;
11975 fps_range.video_max_fps = fps_range.max_fps;
11976
11977 LOGD("aeTargetFpsRange fps: [%f %f]",
11978 fps_range.min_fps, fps_range.max_fps);
11979 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11980 * follows:
11981 * ---------------------------------------------------------------|
11982 * Video stream is absent in configure_streams |
11983 * (Camcorder preview before the first video record |
11984 * ---------------------------------------------------------------|
11985 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11986 * | | | vid_min/max_fps|
11987 * ---------------------------------------------------------------|
11988 * NO | [ 30, 240] | 240 | [240, 240] |
11989 * |-------------|-------------|----------------|
11990 * | [240, 240] | 240 | [240, 240] |
11991 * ---------------------------------------------------------------|
11992 * Video stream is present in configure_streams |
11993 * ---------------------------------------------------------------|
11994 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11995 * | | | vid_min/max_fps|
11996 * ---------------------------------------------------------------|
11997 * NO | [ 30, 240] | 240 | [240, 240] |
11998 * (camcorder prev |-------------|-------------|----------------|
11999 * after video rec | [240, 240] | 240 | [240, 240] |
12000 * is stopped) | | | |
12001 * ---------------------------------------------------------------|
12002 * YES | [ 30, 240] | 240 | [240, 240] |
12003 * |-------------|-------------|----------------|
12004 * | [240, 240] | 240 | [240, 240] |
12005 * ---------------------------------------------------------------|
12006 * When Video stream is absent in configure_streams,
12007 * preview fps = sensor_fps / batchsize
12008 * Eg: for 240fps at batchSize 4, preview = 60fps
12009 * for 120fps at batchSize 4, preview = 30fps
12010 *
12011 * When video stream is present in configure_streams, preview fps is as per
12012 * the ratio of preview buffers to video buffers requested in process
12013 * capture request
12014 */
12015 mBatchSize = 0;
12016 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12017 fps_range.min_fps = fps_range.video_max_fps;
12018 fps_range.video_min_fps = fps_range.video_max_fps;
12019 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12020 fps_range.max_fps);
12021 if (NAME_NOT_FOUND != val) {
12022 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12024 return BAD_VALUE;
12025 }
12026
12027 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12028 /* If batchmode is currently in progress and the fps changes,
12029 * set the flag to restart the sensor */
12030 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12031 (mHFRVideoFps != fps_range.max_fps)) {
12032 mNeedSensorRestart = true;
12033 }
12034 mHFRVideoFps = fps_range.max_fps;
12035 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12036 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12037 mBatchSize = MAX_HFR_BATCH_SIZE;
12038 }
12039 }
12040 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12041
12042 }
12043 } else {
12044 /* HFR mode is a session param in the backend/ISP. It should be reset when
12045 * in non-HFR mode */
12046 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12048 return BAD_VALUE;
12049 }
12050 }
12051 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12052 return BAD_VALUE;
12053 }
12054 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12055 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12056 return rc;
12057}
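/*
 * Worked example of the batching arithmetic above (illustrative, assuming
 * PREVIEW_FPS_FOR_HFR is 30): a constrained high-speed request of [240, 240] fps gives
 *
 *   mBatchSize = 240 / 30 = 8   (then clamped to MAX_HFR_BATCH_SIZE),
 *
 * and with no video stream configured the preview effectively runs at
 * sensor_fps / batch size.
 */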
12058
12059/*===========================================================================
12060 * FUNCTION : translateToHalMetadata
12061 *
12062 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12063 *
12064 *
12065 * PARAMETERS :
12066 * @request : request sent from framework
12067 *
12068 *
12069 * RETURN : success: NO_ERROR
12070 * failure:
12071 *==========================================================================*/
12072int QCamera3HardwareInterface::translateToHalMetadata
12073 (const camera3_capture_request_t *request,
12074 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012075 uint32_t snapshotStreamId) {
12076 if (request == nullptr || hal_metadata == nullptr) {
12077 return BAD_VALUE;
12078 }
12079
12080 int64_t minFrameDuration = getMinFrameDuration(request);
12081
12082 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12083 minFrameDuration);
12084}
12085
12086int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12087 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12088 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12089
Thierry Strudel3d639192016-09-09 11:52:26 -070012090 int rc = 0;
12091 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012092 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012093
12094 /* Do not change the order of the following list unless you know what you are
12095 * doing.
12096 * The order is laid out in such a way that parameters in the front of the table
12097 * may be used to override the parameters later in the table. Examples are:
12098 * 1. META_MODE should precede AEC/AWB/AF MODE
12099 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12100 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12101 * 4. Any mode should precede its corresponding settings
12102 */
12103 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12104 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12105 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12106 rc = BAD_VALUE;
12107 }
12108 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12109 if (rc != NO_ERROR) {
12110 LOGE("extractSceneMode failed");
12111 }
12112 }
12113
12114 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12115 uint8_t fwk_aeMode =
12116 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12117 uint8_t aeMode;
12118 int32_t redeye;
12119
12120 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12121 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012122 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12123 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012124 } else {
12125 aeMode = CAM_AE_MODE_ON;
12126 }
12127 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12128 redeye = 1;
12129 } else {
12130 redeye = 0;
12131 }
12132
12133 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12134 fwk_aeMode);
12135 if (NAME_NOT_FOUND != val) {
12136 int32_t flashMode = (int32_t)val;
12137 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12138 }
12139
12140 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12141 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12142 rc = BAD_VALUE;
12143 }
12144 }
12145
12146 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12147 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12148 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12149 fwk_whiteLevel);
12150 if (NAME_NOT_FOUND != val) {
12151 uint8_t whiteLevel = (uint8_t)val;
12152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12153 rc = BAD_VALUE;
12154 }
12155 }
12156 }
12157
12158 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12159 uint8_t fwk_cacMode =
12160 frame_settings.find(
12161 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12162 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12163 fwk_cacMode);
12164 if (NAME_NOT_FOUND != val) {
12165 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12166 bool entryAvailable = FALSE;
12167 // Check whether Frameworks set CAC mode is supported in device or not
12168 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12169 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12170 entryAvailable = TRUE;
12171 break;
12172 }
12173 }
12174 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12175 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
12176 // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast in the ISP
12177 // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
12178 if (entryAvailable == FALSE) {
12179 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12180 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12181 } else {
12182 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12183 // High is not supported, so set FAST since the spec says the underlying
12184 // device implementation can be the same for both modes.
12185 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12186 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12187 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12188 // in order to avoid the fps drop due to high quality
12189 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12190 } else {
12191 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12192 }
12193 }
12194 }
12195 LOGD("Final cacMode is %d", cacMode);
12196 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12197 rc = BAD_VALUE;
12198 }
12199 } else {
12200 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12201 }
12202 }
12203
Jason Lee84ae9972017-02-24 13:24:24 -080012204 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012205 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012206 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012207 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012208 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12209 fwk_focusMode);
12210 if (NAME_NOT_FOUND != val) {
12211 uint8_t focusMode = (uint8_t)val;
12212 LOGD("set focus mode %d", focusMode);
12213 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12214 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12215 rc = BAD_VALUE;
12216 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012217 }
12218 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012219 } else {
12220 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12221 LOGE("Focus forced to infinity %d", focusMode);
12222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12223 rc = BAD_VALUE;
12224 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012225 }
12226
Jason Lee84ae9972017-02-24 13:24:24 -080012227 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12228 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012229 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12231 focalDistance)) {
12232 rc = BAD_VALUE;
12233 }
12234 }
12235
12236 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12237 uint8_t fwk_antibandingMode =
12238 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12239 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12240 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12241 if (NAME_NOT_FOUND != val) {
12242 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012243 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12244 if (m60HzZone) {
12245 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12246 } else {
12247 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12248 }
12249 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012250 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12251 hal_antibandingMode)) {
12252 rc = BAD_VALUE;
12253 }
12254 }
12255 }
12256
12257 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12258 int32_t expCompensation = frame_settings.find(
12259 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12260 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12261 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12262 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12263 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012264 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12266 expCompensation)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270
12271 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12272 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12274 rc = BAD_VALUE;
12275 }
12276 }
12277 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12278 rc = setHalFpsRange(frame_settings, hal_metadata);
12279 if (rc != NO_ERROR) {
12280 LOGE("setHalFpsRange failed");
12281 }
12282 }
12283
12284 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12285 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12286 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12287 rc = BAD_VALUE;
12288 }
12289 }
12290
12291 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12292 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12293 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12294 fwk_effectMode);
12295 if (NAME_NOT_FOUND != val) {
12296 uint8_t effectMode = (uint8_t)val;
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12298 rc = BAD_VALUE;
12299 }
12300 }
12301 }
12302
12303 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12304 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12306 colorCorrectMode)) {
12307 rc = BAD_VALUE;
12308 }
12309 }
12310
12311 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12312 cam_color_correct_gains_t colorCorrectGains;
12313 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12314 colorCorrectGains.gains[i] =
12315 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12316 }
12317 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12318 colorCorrectGains)) {
12319 rc = BAD_VALUE;
12320 }
12321 }
12322
12323 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12324 cam_color_correct_matrix_t colorCorrectTransform;
12325 cam_rational_type_t transform_elem;
12326 size_t num = 0;
12327 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12328 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12329 transform_elem.numerator =
12330 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12331 transform_elem.denominator =
12332 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12333 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12334 num++;
12335 }
12336 }
12337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12338 colorCorrectTransform)) {
12339 rc = BAD_VALUE;
12340 }
12341 }
12342
12343 cam_trigger_t aecTrigger;
12344 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12345 aecTrigger.trigger_id = -1;
12346 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12347 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12348 aecTrigger.trigger =
12349 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12350 aecTrigger.trigger_id =
12351 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12352 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12353 aecTrigger)) {
12354 rc = BAD_VALUE;
12355 }
12356 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12357 aecTrigger.trigger, aecTrigger.trigger_id);
12358 }
12359
12360 /*af_trigger must come with a trigger id*/
12361 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12362 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12363 cam_trigger_t af_trigger;
12364 af_trigger.trigger =
12365 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12366 af_trigger.trigger_id =
12367 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12369 rc = BAD_VALUE;
12370 }
12371 LOGD("AfTrigger: %d AfTriggerID: %d",
12372 af_trigger.trigger, af_trigger.trigger_id);
12373 }
12374
12375 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12376 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12378 rc = BAD_VALUE;
12379 }
12380 }
12381 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12382 cam_edge_application_t edge_application;
12383 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012384
Thierry Strudel3d639192016-09-09 11:52:26 -070012385 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12386 edge_application.sharpness = 0;
12387 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012388 edge_application.sharpness =
12389 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12390 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12391 int32_t sharpness =
12392 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12393 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12394 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12395 LOGD("Setting edge mode sharpness %d", sharpness);
12396 edge_application.sharpness = sharpness;
12397 }
12398 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012399 }
12400 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12401 rc = BAD_VALUE;
12402 }
12403 }
12404
12405 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12406 int32_t respectFlashMode = 1;
12407 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12408 uint8_t fwk_aeMode =
12409 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012410 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12411 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12412 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012413 respectFlashMode = 0;
12414 LOGH("AE Mode controls flash, ignore android.flash.mode");
12415 }
12416 }
12417 if (respectFlashMode) {
12418 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12419 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12420 LOGH("flash mode after mapping %d", val);
12421 // To check: CAM_INTF_META_FLASH_MODE usage
12422 if (NAME_NOT_FOUND != val) {
12423 uint8_t flashMode = (uint8_t)val;
12424 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12425 rc = BAD_VALUE;
12426 }
12427 }
12428 }
12429 }
12430
12431 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12432 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12433 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12434 rc = BAD_VALUE;
12435 }
12436 }
12437
12438 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12439 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12441 flashFiringTime)) {
12442 rc = BAD_VALUE;
12443 }
12444 }
12445
12446 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12447 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12448 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12449 hotPixelMode)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453
12454 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12455 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12457 lensAperture)) {
12458 rc = BAD_VALUE;
12459 }
12460 }
12461
12462 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12463 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12464 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12465 filterDensity)) {
12466 rc = BAD_VALUE;
12467 }
12468 }
12469
12470 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12471 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12472 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12473 focalLength)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12479 uint8_t optStabMode =
12480 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12481 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12482 optStabMode)) {
12483 rc = BAD_VALUE;
12484 }
12485 }
12486
12487 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12488 uint8_t videoStabMode =
12489 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12490 LOGD("videoStabMode from APP = %d", videoStabMode);
12491 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12492 videoStabMode)) {
12493 rc = BAD_VALUE;
12494 }
12495 }
12496
12497
12498 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12499 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12501 noiseRedMode)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505
12506 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12507 float reprocessEffectiveExposureFactor =
12508 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12509 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12510 reprocessEffectiveExposureFactor)) {
12511 rc = BAD_VALUE;
12512 }
12513 }
12514
12515 cam_crop_region_t scalerCropRegion;
12516 bool scalerCropSet = false;
12517 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12518 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12519 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12520 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12521 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12522
12523 // Map coordinate system from active array to sensor output.
12524 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12525 scalerCropRegion.width, scalerCropRegion.height);
12526
12527 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12528 scalerCropRegion)) {
12529 rc = BAD_VALUE;
12530 }
12531 scalerCropSet = true;
12532 }
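        /*
         * Sketch of the mapping above (illustrative numbers, assuming a 4032x3024
         * active array downscaled to a 2016x1512 sensor output mode): a framework
         * crop of (left, top, w, h) = (1008, 756, 2016, 1512) in active-array
         * coordinates would come out of mCropRegionMapper.toSensor() as
         * (504, 378, 1008, 756) in sensor coordinates before being sent to the
         * backend as CAM_INTF_META_SCALER_CROP_REGION.
         */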
12533
12534 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12535 int64_t sensorExpTime =
12536 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12537 LOGD("setting sensorExpTime %lld", sensorExpTime);
12538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12539 sensorExpTime)) {
12540 rc = BAD_VALUE;
12541 }
12542 }
12543
12544 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12545 int64_t sensorFrameDuration =
12546 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012547 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12548 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12549 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12550 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12551 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12552 sensorFrameDuration)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
12557 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12558 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12559 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12560 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12561 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12562 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12563 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12565 sensorSensitivity)) {
12566 rc = BAD_VALUE;
12567 }
12568 }
12569
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012570#ifndef USE_HAL_3_3
12571 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12572 int32_t ispSensitivity =
12573 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12574 if (ispSensitivity <
12575 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12576 ispSensitivity =
12577 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12578 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12579 }
12580 if (ispSensitivity >
12581 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12582 ispSensitivity =
12583 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12584 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12585 }
12586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12587 ispSensitivity)) {
12588 rc = BAD_VALUE;
12589 }
12590 }
12591#endif
12592
Thierry Strudel3d639192016-09-09 11:52:26 -070012593 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12594 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12595 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12596 rc = BAD_VALUE;
12597 }
12598 }
12599
12600 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12601 uint8_t fwk_facedetectMode =
12602 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12603
12604 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12605 fwk_facedetectMode);
12606
12607 if (NAME_NOT_FOUND != val) {
12608 uint8_t facedetectMode = (uint8_t)val;
12609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12610 facedetectMode)) {
12611 rc = BAD_VALUE;
12612 }
12613 }
12614 }
12615
Thierry Strudel54dc9782017-02-15 12:12:10 -080012616 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012617 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012618 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12620 histogramMode)) {
12621 rc = BAD_VALUE;
12622 }
12623 }
12624
12625 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12626 uint8_t sharpnessMapMode =
12627 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12628 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12629 sharpnessMapMode)) {
12630 rc = BAD_VALUE;
12631 }
12632 }
12633
12634 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12635 uint8_t tonemapMode =
12636 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12637 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12638 rc = BAD_VALUE;
12639 }
12640 }
12641 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12642 /*All tonemap channels will have the same number of points*/
12643 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12644 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12645 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12646 cam_rgb_tonemap_curves tonemapCurves;
12647 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12648 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12649 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12650 tonemapCurves.tonemap_points_cnt,
12651 CAM_MAX_TONEMAP_CURVE_SIZE);
12652 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12653 }
12654
12655 /* ch0 = G*/
12656 size_t point = 0;
12657 cam_tonemap_curve_t tonemapCurveGreen;
12658 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12659 for (size_t j = 0; j < 2; j++) {
12660 tonemapCurveGreen.tonemap_points[i][j] =
12661 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12662 point++;
12663 }
12664 }
12665 tonemapCurves.curves[0] = tonemapCurveGreen;
12666
12667 /* ch 1 = B */
12668 point = 0;
12669 cam_tonemap_curve_t tonemapCurveBlue;
12670 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12671 for (size_t j = 0; j < 2; j++) {
12672 tonemapCurveBlue.tonemap_points[i][j] =
12673 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12674 point++;
12675 }
12676 }
12677 tonemapCurves.curves[1] = tonemapCurveBlue;
12678
12679 /* ch 2 = R */
12680 point = 0;
12681 cam_tonemap_curve_t tonemapCurveRed;
12682 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12683 for (size_t j = 0; j < 2; j++) {
12684 tonemapCurveRed.tonemap_points[i][j] =
12685 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12686 point++;
12687 }
12688 }
12689 tonemapCurves.curves[2] = tonemapCurveRed;
12690
12691 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12692 tonemapCurves)) {
12693 rc = BAD_VALUE;
12694 }
12695 }
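        /*
         * Sketch (illustrative): a tonemap curve supplied by the framework arrives as
         * interleaved (Pin, Pout) float pairs per channel, so two points per channel
         * yield tonemap_points_cnt == 2 above. On the request side a linear curve
         * could be set as:
         *
         *   float linear[] = {0.0f, 0.0f, 1.0f, 1.0f};
         *   requestSettings.update(ANDROID_TONEMAP_CURVE_RED,   linear, 4);
         *   requestSettings.update(ANDROID_TONEMAP_CURVE_GREEN, linear, 4);
         *   requestSettings.update(ANDROID_TONEMAP_CURVE_BLUE,  linear, 4);
         *
         * requestSettings is a hypothetical CameraMetadata holding the capture request.
         */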
12696
12697 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12698 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12700 captureIntent)) {
12701 rc = BAD_VALUE;
12702 }
12703 }
12704
12705 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12706 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12708 blackLevelLock)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712
12713 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12714 uint8_t lensShadingMapMode =
12715 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12716 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12717 lensShadingMapMode)) {
12718 rc = BAD_VALUE;
12719 }
12720 }
12721
12722 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12723 cam_area_t roi;
12724 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012725 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012726
12727 // Map coordinate system from active array to sensor output.
12728 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12729 roi.rect.height);
12730
12731 if (scalerCropSet) {
12732 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12733 }
12734 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12735 rc = BAD_VALUE;
12736 }
12737 }
12738
12739 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12740 cam_area_t roi;
12741 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012742 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012743
12744 // Map coordinate system from active array to sensor output.
12745 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12746 roi.rect.height);
12747
12748 if (scalerCropSet) {
12749 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12750 }
12751 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12752 rc = BAD_VALUE;
12753 }
12754 }
12755
12756 // CDS for non-HFR non-video mode
12757 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12758 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12759 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12760 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12761 LOGE("Invalid CDS mode %d!", *fwk_cds);
12762 } else {
12763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12764 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12765 rc = BAD_VALUE;
12766 }
12767 }
12768 }
12769
Thierry Strudel04e026f2016-10-10 11:27:36 -070012770 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012771 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012772 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012773 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12774 }
12775 if (m_bVideoHdrEnabled)
12776 vhdr = CAM_VIDEO_HDR_MODE_ON;
12777
Thierry Strudel54dc9782017-02-15 12:12:10 -080012778 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12779
12780 if(vhdr != curr_hdr_state)
12781 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12782
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012783 rc = setVideoHdrMode(mParameters, vhdr);
12784 if (rc != NO_ERROR) {
12785 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012786 }
12787
12788 //IR
12789 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12790 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12791 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012792 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12793 uint8_t isIRon = 0;
12794
12795        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012796 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12797 LOGE("Invalid IR mode %d!", fwk_ir);
12798 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012799 if(isIRon != curr_ir_state )
12800 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12801
Thierry Strudel04e026f2016-10-10 11:27:36 -070012802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12803 CAM_INTF_META_IR_MODE, fwk_ir)) {
12804 rc = BAD_VALUE;
12805 }
12806 }
12807 }
12808
Thierry Strudel54dc9782017-02-15 12:12:10 -080012809 //Binning Correction Mode
12810 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12811 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12812 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12813 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12814 || (0 > fwk_binning_correction)) {
12815 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12816 } else {
12817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12818 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12819 rc = BAD_VALUE;
12820 }
12821 }
12822 }
12823
Thierry Strudel269c81a2016-10-12 12:13:59 -070012824 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12825 float aec_speed;
12826 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12827 LOGD("AEC Speed :%f", aec_speed);
12828 if ( aec_speed < 0 ) {
12829 LOGE("Invalid AEC mode %f!", aec_speed);
12830 } else {
12831 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12832 aec_speed)) {
12833 rc = BAD_VALUE;
12834 }
12835 }
12836 }
12837
12838 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12839 float awb_speed;
12840 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12841 LOGD("AWB Speed :%f", awb_speed);
12842 if ( awb_speed < 0 ) {
12843 LOGE("Invalid AWB mode %f!", awb_speed);
12844 } else {
12845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12846 awb_speed)) {
12847 rc = BAD_VALUE;
12848 }
12849 }
12850 }
12851
Thierry Strudel3d639192016-09-09 11:52:26 -070012852 // TNR
12853 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12854 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12855 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012856 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012857 cam_denoise_param_t tnr;
12858 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12859 tnr.process_plates =
12860 (cam_denoise_process_type_t)frame_settings.find(
12861 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12862 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012863
12864 if(b_TnrRequested != curr_tnr_state)
12865 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12866
Thierry Strudel3d639192016-09-09 11:52:26 -070012867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12868 rc = BAD_VALUE;
12869 }
12870 }
12871
Thierry Strudel54dc9782017-02-15 12:12:10 -080012872 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012873 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012874 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12876 *exposure_metering_mode)) {
12877 rc = BAD_VALUE;
12878 }
12879 }
12880
Thierry Strudel3d639192016-09-09 11:52:26 -070012881 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12882 int32_t fwk_testPatternMode =
12883 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12884 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12885 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12886
12887 if (NAME_NOT_FOUND != testPatternMode) {
12888 cam_test_pattern_data_t testPatternData;
12889 memset(&testPatternData, 0, sizeof(testPatternData));
12890 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12891 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12892 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12893 int32_t *fwk_testPatternData =
12894 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12895 testPatternData.r = fwk_testPatternData[0];
12896 testPatternData.b = fwk_testPatternData[3];
12897 switch (gCamCapability[mCameraId]->color_arrangement) {
12898 case CAM_FILTER_ARRANGEMENT_RGGB:
12899 case CAM_FILTER_ARRANGEMENT_GRBG:
12900 testPatternData.gr = fwk_testPatternData[1];
12901 testPatternData.gb = fwk_testPatternData[2];
12902 break;
12903 case CAM_FILTER_ARRANGEMENT_GBRG:
12904 case CAM_FILTER_ARRANGEMENT_BGGR:
12905 testPatternData.gr = fwk_testPatternData[2];
12906 testPatternData.gb = fwk_testPatternData[1];
12907 break;
12908 default:
12909 LOGE("color arrangement %d is not supported",
12910 gCamCapability[mCameraId]->color_arrangement);
12911 break;
12912 }
12913 }
12914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12915 testPatternData)) {
12916 rc = BAD_VALUE;
12917 }
12918 } else {
12919 LOGE("Invalid framework sensor test pattern mode %d",
12920 fwk_testPatternMode);
12921 }
12922 }
12923
12924 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12925 size_t count = 0;
12926 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12927 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12928 gps_coords.data.d, gps_coords.count, count);
12929 if (gps_coords.count != count) {
12930 rc = BAD_VALUE;
12931 }
12932 }
12933
12934 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12935 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12936 size_t count = 0;
12937 const char *gps_methods_src = (const char *)
12938 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12939 memset(gps_methods, '\0', sizeof(gps_methods));
12940 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12941 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12942 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12943 if (GPS_PROCESSING_METHOD_SIZE != count) {
12944 rc = BAD_VALUE;
12945 }
12946 }
12947
12948 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12949 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12951 gps_timestamp)) {
12952 rc = BAD_VALUE;
12953 }
12954 }
12955
12956 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12957 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12958 cam_rotation_info_t rotation_info;
12959 if (orientation == 0) {
12960 rotation_info.rotation = ROTATE_0;
12961 } else if (orientation == 90) {
12962 rotation_info.rotation = ROTATE_90;
12963 } else if (orientation == 180) {
12964 rotation_info.rotation = ROTATE_180;
12965 } else if (orientation == 270) {
12966 rotation_info.rotation = ROTATE_270;
12967 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012968 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012969 rotation_info.streamId = snapshotStreamId;
12970 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12972 rc = BAD_VALUE;
12973 }
12974 }
12975
12976 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12977 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12979 rc = BAD_VALUE;
12980 }
12981 }
12982
12983 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12984 uint32_t thumb_quality = (uint32_t)
12985 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12987 thumb_quality)) {
12988 rc = BAD_VALUE;
12989 }
12990 }
12991
12992 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12993 cam_dimension_t dim;
12994 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12995 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12996 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12997 rc = BAD_VALUE;
12998 }
12999 }
13000
13001 // Internal metadata
13002 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13003 size_t count = 0;
13004 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13005 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13006 privatedata.data.i32, privatedata.count, count);
13007 if (privatedata.count != count) {
13008 rc = BAD_VALUE;
13009 }
13010 }
13011
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013012 // ISO/Exposure Priority
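    // In short: when the framework requests ISO priority or exposure-time priority
    // through the QCAMERA3_SELECT_PRIORITY / QCAMERA3_USE_ISO_EXP_PRIORITY vendor
    // tags, the manual value is forwarded below as CAM_INTF_PARM_ISO or
    // CAM_INTF_PARM_EXPOSURE_TIME and ZSL mode is forced on; if the tags are
    // absent, ZSL mode is explicitly turned off.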
13013 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13014 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13015 cam_priority_mode_t mode =
13016 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13017 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13018 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13019 use_iso_exp_pty.previewOnly = FALSE;
13020 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13021 use_iso_exp_pty.value = *ptr;
13022
13023 if(CAM_ISO_PRIORITY == mode) {
13024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13025 use_iso_exp_pty)) {
13026 rc = BAD_VALUE;
13027 }
13028 }
13029 else {
13030 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13031 use_iso_exp_pty)) {
13032 rc = BAD_VALUE;
13033 }
13034 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013035
13036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13037 rc = BAD_VALUE;
13038 }
13039 }
13040 } else {
13041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13042 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013043 }
13044 }
13045
13046 // Saturation
13047 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13048 int32_t* use_saturation =
13049 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13051 rc = BAD_VALUE;
13052 }
13053 }
13054
Thierry Strudel3d639192016-09-09 11:52:26 -070013055 // EV step
13056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13057 gCamCapability[mCameraId]->exp_compensation_step)) {
13058 rc = BAD_VALUE;
13059 }
13060
13061 // CDS info
13062 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13063 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13064 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13065
13066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13067 CAM_INTF_META_CDS_DATA, *cdsData)) {
13068 rc = BAD_VALUE;
13069 }
13070 }
13071
Shuzhen Wang19463d72016-03-08 11:09:52 -080013072 // Hybrid AE
13073 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13074 uint8_t *hybrid_ae = (uint8_t *)
13075 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
13076
13077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13078 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13079 rc = BAD_VALUE;
13080 }
13081 }
13082
Shuzhen Wang14415f52016-11-16 18:26:18 -080013083 // Histogram
13084 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13085 uint8_t histogramMode =
13086 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13088 histogramMode)) {
13089 rc = BAD_VALUE;
13090 }
13091 }
13092
13093 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13094 int32_t histogramBins =
13095 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13097 histogramBins)) {
13098 rc = BAD_VALUE;
13099 }
13100 }
13101
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013102 // Tracking AF
13103 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13104 uint8_t trackingAfTrigger =
13105 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13106 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13107 trackingAfTrigger)) {
13108 rc = BAD_VALUE;
13109 }
13110 }
13111
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013112 // Makernote
13113 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13114 if (entry.count != 0) {
13115 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13116 cam_makernote_t makernote;
13117 makernote.length = entry.count;
13118 memcpy(makernote.data, entry.data.u8, makernote.length);
13119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13120 rc = BAD_VALUE;
13121 }
13122 } else {
13123 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13124 MAX_MAKERNOTE_LENGTH);
13125 rc = BAD_VALUE;
13126 }
13127 }
13128
Thierry Strudel3d639192016-09-09 11:52:26 -070013129 return rc;
13130}
13131
13132/*===========================================================================
13133 * FUNCTION : captureResultCb
13134 *
13135 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13136 *
13137 * PARAMETERS :
13138 * @frame : frame information from mm-camera-interface
13139 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13140 * @userdata: userdata
13141 *
13142 * RETURN : NONE
13143 *==========================================================================*/
13144void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13145 camera3_stream_buffer_t *buffer,
13146 uint32_t frame_number, bool isInputBuffer, void *userdata)
13147{
13148 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13149 if (hw == NULL) {
13150 LOGE("Invalid hw %p", hw);
13151 return;
13152 }
13153
13154 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13155 return;
13156}
13157
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013158/*===========================================================================
13159 * FUNCTION : setBufferErrorStatus
13160 *
13161 * DESCRIPTION: Callback handler for channels to report any buffer errors
13162 *
13163 * PARAMETERS :
13164 * @ch : Channel on which buffer error is reported from
13165 * @frame_number : frame number on which buffer error is reported
13166 * @buffer_status : buffer error status
13167 * @userdata: userdata
13168 *
13169 * RETURN : NONE
13170 *==========================================================================*/
13171void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13172 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13173{
13174 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13175 if (hw == NULL) {
13176 LOGE("Invalid hw %p", hw);
13177 return;
13178 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013179
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013180 hw->setBufferErrorStatus(ch, frame_number, err);
13181 return;
13182}
13183
13184void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13185 uint32_t frameNumber, camera3_buffer_status_t err)
13186{
13187 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13188 pthread_mutex_lock(&mMutex);
13189
13190 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13191 if (req.frame_number != frameNumber)
13192 continue;
13193 for (auto& k : req.mPendingBufferList) {
13194 if(k.stream->priv == ch) {
13195 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13196 }
13197 }
13198 }
13199
13200 pthread_mutex_unlock(&mMutex);
13201 return;
13202}
Thierry Strudel3d639192016-09-09 11:52:26 -070013203/*===========================================================================
13204 * FUNCTION : initialize
13205 *
13206 * DESCRIPTION: Pass framework callback pointers to HAL
13207 *
13208 * PARAMETERS :
13209 *
13210 *
13211 * RETURN : Success : 0
13212 * Failure: -ENODEV
13213 *==========================================================================*/
13214
13215int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13216 const camera3_callback_ops_t *callback_ops)
13217{
13218 LOGD("E");
13219 QCamera3HardwareInterface *hw =
13220 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13221 if (!hw) {
13222 LOGE("NULL camera device");
13223 return -ENODEV;
13224 }
13225
13226 int rc = hw->initialize(callback_ops);
13227 LOGD("X");
13228 return rc;
13229}
13230
13231/*===========================================================================
13232 * FUNCTION : configure_streams
13233 *
13234 * DESCRIPTION: Framework entry point for stream configuration; forwards to configureStreams()
13235 *
13236 * PARAMETERS :
13237 *
13238 *
13239 * RETURN : Success: 0
13240 * Failure: -EINVAL (if stream configuration is invalid)
13241 * -ENODEV (fatal error)
13242 *==========================================================================*/
13243
13244int QCamera3HardwareInterface::configure_streams(
13245 const struct camera3_device *device,
13246 camera3_stream_configuration_t *stream_list)
13247{
13248 LOGD("E");
13249 QCamera3HardwareInterface *hw =
13250 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13251 if (!hw) {
13252 LOGE("NULL camera device");
13253 return -ENODEV;
13254 }
13255 int rc = hw->configureStreams(stream_list);
13256 LOGD("X");
13257 return rc;
13258}
13259
13260/*===========================================================================
13261 * FUNCTION : construct_default_request_settings
13262 *
13263 * DESCRIPTION: Configure a settings buffer to meet the required use case
13264 *
13265 * PARAMETERS :
13266 *
13267 *
13268 * RETURN : Success: Return valid metadata
13269 * Failure: Return NULL
13270 *==========================================================================*/
13271const camera_metadata_t* QCamera3HardwareInterface::
13272 construct_default_request_settings(const struct camera3_device *device,
13273 int type)
13274{
13275
13276 LOGD("E");
13277 camera_metadata_t* fwk_metadata = NULL;
13278 QCamera3HardwareInterface *hw =
13279 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13280 if (!hw) {
13281 LOGE("NULL camera device");
13282 return NULL;
13283 }
13284
13285 fwk_metadata = hw->translateCapabilityToMetadata(type);
13286
13287 LOGD("X");
13288 return fwk_metadata;
13289}
13290
13291/*===========================================================================
13292 * FUNCTION : process_capture_request
13293 *
13294 * DESCRIPTION: Framework entry point for capture requests; forwards to orchestrateRequest()
13295 *
13296 * PARAMETERS :
13297 *
13298 *
13299 * RETURN :
13300 *==========================================================================*/
13301int QCamera3HardwareInterface::process_capture_request(
13302 const struct camera3_device *device,
13303 camera3_capture_request_t *request)
13304{
13305 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013306 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013307 QCamera3HardwareInterface *hw =
13308 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13309 if (!hw) {
13310 LOGE("NULL camera device");
13311 return -EINVAL;
13312 }
13313
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013314 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013315 LOGD("X");
13316 return rc;
13317}
13318
13319/*===========================================================================
13320 * FUNCTION : dump
13321 *
13322 * DESCRIPTION: Dump HAL state for this camera device to the given file descriptor
13323 *
13324 * PARAMETERS :
13325 *
13326 *
13327 * RETURN :
13328 *==========================================================================*/
13329
13330void QCamera3HardwareInterface::dump(
13331 const struct camera3_device *device, int fd)
13332{
13333 /* Log level property is read when "adb shell dumpsys media.camera" is
13334 called so that the log level can be controlled without restarting
13335 the media server */
13336 getLogLevel();
13337
13338 LOGD("E");
13339 QCamera3HardwareInterface *hw =
13340 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13341 if (!hw) {
13342 LOGE("NULL camera device");
13343 return;
13344 }
13345
13346 hw->dump(fd);
13347 LOGD("X");
13348 return;
13349}
13350
13351/*===========================================================================
13352 * FUNCTION : flush
13353 *
13354 * DESCRIPTION: Framework entry point to flush all in-flight requests; forwards to flush()
13355 *
13356 * PARAMETERS :
13357 *
13358 *
13359 * RETURN :
13360 *==========================================================================*/
13361
13362int QCamera3HardwareInterface::flush(
13363 const struct camera3_device *device)
13364{
13365 int rc;
13366 LOGD("E");
13367 QCamera3HardwareInterface *hw =
13368 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13369 if (!hw) {
13370 LOGE("NULL camera device");
13371 return -EINVAL;
13372 }
13373
13374 pthread_mutex_lock(&hw->mMutex);
13375 // Validate current state
13376 switch (hw->mState) {
13377 case STARTED:
13378 /* valid state */
13379 break;
13380
13381 case ERROR:
13382 pthread_mutex_unlock(&hw->mMutex);
13383 hw->handleCameraDeviceError();
13384 return -ENODEV;
13385
13386 default:
13387 LOGI("Flush returned during state %d", hw->mState);
13388 pthread_mutex_unlock(&hw->mMutex);
13389 return 0;
13390 }
13391 pthread_mutex_unlock(&hw->mMutex);
13392
13393 rc = hw->flush(true /* restart channels */ );
13394 LOGD("X");
13395 return rc;
13396}
13397
13398/*===========================================================================
13399 * FUNCTION : close_camera_device
13400 *
13401 * DESCRIPTION: Close the camera device and release the HAL instance
13402 *
13403 * PARAMETERS :
13404 *
13405 *
13406 * RETURN :
13407 *==========================================================================*/
13408int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13409{
13410 int ret = NO_ERROR;
13411 QCamera3HardwareInterface *hw =
13412 reinterpret_cast<QCamera3HardwareInterface *>(
13413 reinterpret_cast<camera3_device_t *>(device)->priv);
13414 if (!hw) {
13415 LOGE("NULL camera device");
13416 return BAD_VALUE;
13417 }
13418
13419 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13420 delete hw;
13421 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013422 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013423 return ret;
13424}
13425
13426/*===========================================================================
13427 * FUNCTION : getWaveletDenoiseProcessPlate
13428 *
13429 * DESCRIPTION: query wavelet denoise process plate
13430 *
13431 * PARAMETERS : None
13432 *
13433 * RETURN : WNR process plate value
13434 *==========================================================================*/
13435cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13436{
13437 char prop[PROPERTY_VALUE_MAX];
13438 memset(prop, 0, sizeof(prop));
13439 property_get("persist.denoise.process.plates", prop, "0");
13440 int processPlate = atoi(prop);
13441 switch(processPlate) {
13442 case 0:
13443 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13444 case 1:
13445 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13446 case 2:
13447 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13448 case 3:
13449 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13450 default:
13451 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13452 }
13453}
13454
13455
13456/*===========================================================================
13457 * FUNCTION : getTemporalDenoiseProcessPlate
13458 *
13459 * DESCRIPTION: query temporal denoise process plate
13460 *
13461 * PARAMETERS : None
13462 *
13463 * RETURN : TNR process plate value
13464 *==========================================================================*/
13465cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13466{
13467 char prop[PROPERTY_VALUE_MAX];
13468 memset(prop, 0, sizeof(prop));
13469 property_get("persist.tnr.process.plates", prop, "0");
13470 int processPlate = atoi(prop);
13471 switch(processPlate) {
13472 case 0:
13473 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13474 case 1:
13475 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13476 case 2:
13477 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13478 case 3:
13479 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13480 default:
13481 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13482 }
13483}
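// Illustrative usage of the two process-plate properties read above (values map to
// the switch cases; the resulting image quality depends on the tuning in use):
//   adb shell setprop persist.denoise.process.plates 2   # WNR -> STREAMLINE_YCBCR
//   adb shell setprop persist.tnr.process.plates 1       # TNR -> CBCR_ONLY
// Any value outside 0..3 falls back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.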
13484
13485
13486/*===========================================================================
13487 * FUNCTION : extractSceneMode
13488 *
13489 * DESCRIPTION: Extract scene mode from frameworks set metadata
13490 *
13491 * PARAMETERS :
13492 * @frame_settings: CameraMetadata reference
13493 * @metaMode: ANDROID_CONTROL_MODE
13494 * @hal_metadata: hal metadata structure
13495 *
13496 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13497 *==========================================================================*/
13498int32_t QCamera3HardwareInterface::extractSceneMode(
13499 const CameraMetadata &frame_settings, uint8_t metaMode,
13500 metadata_buffer_t *hal_metadata)
13501{
13502 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013503 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13504
13505 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13506 LOGD("Ignoring control mode OFF_KEEP_STATE");
13507 return NO_ERROR;
13508 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013509
13510 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13511 camera_metadata_ro_entry entry =
13512 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13513 if (0 == entry.count)
13514 return rc;
13515
13516 uint8_t fwk_sceneMode = entry.data.u8[0];
13517
13518 int val = lookupHalName(SCENE_MODES_MAP,
13519 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13520 fwk_sceneMode);
13521 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013522 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013523 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013524 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013525 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013526
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013527 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13528 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13529 }
13530
13531 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13532 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013533 cam_hdr_param_t hdr_params;
13534 hdr_params.hdr_enable = 1;
13535 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13536 hdr_params.hdr_need_1x = false;
13537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13538 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13539 rc = BAD_VALUE;
13540 }
13541 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013542
Thierry Strudel3d639192016-09-09 11:52:26 -070013543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13544 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13545 rc = BAD_VALUE;
13546 }
13547 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013548
13549 if (mForceHdrSnapshot) {
13550 cam_hdr_param_t hdr_params;
13551 hdr_params.hdr_enable = 1;
13552 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13553 hdr_params.hdr_need_1x = false;
13554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13555 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13556 rc = BAD_VALUE;
13557 }
13558 }
13559
Thierry Strudel3d639192016-09-09 11:52:26 -070013560 return rc;
13561}
13562
13563/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013564 * FUNCTION : setVideoHdrMode
13565 *
13566 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13567 *
13568 * PARAMETERS :
13569 * @hal_metadata: hal metadata structure
13570 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
13571 *
13572 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13573 *==========================================================================*/
13574int32_t QCamera3HardwareInterface::setVideoHdrMode(
13575 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13576{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013577 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13578 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13579 }
13580
13581 LOGE("Invalid Video HDR mode %d!", vhdr);
13582 return BAD_VALUE;
13583}
13584
13585/*===========================================================================
13586 * FUNCTION : setSensorHDR
13587 *
13588 * DESCRIPTION: Enable/disable sensor HDR.
13589 *
13590 * PARAMETERS :
13591 * @hal_metadata: hal metadata structure
13592 * @enable: whether to enable/disable sensor HDR; @isVideoHdrEnable: true when invoked from the video HDR path
13593 *
13594 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13595 *==========================================================================*/
13596int32_t QCamera3HardwareInterface::setSensorHDR(
13597 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13598{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013599 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013600 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13601
13602 if (enable) {
13603 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13604 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13605 #ifdef _LE_CAMERA_
13606 //Default to staggered HDR for IOT
13607 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13608 #else
13609 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13610 #endif
13611 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13612 }
13613
13614 bool isSupported = false;
13615 switch (sensor_hdr) {
13616 case CAM_SENSOR_HDR_IN_SENSOR:
13617 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13618 CAM_QCOM_FEATURE_SENSOR_HDR) {
13619 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013620 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013621 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013622 break;
13623 case CAM_SENSOR_HDR_ZIGZAG:
13624 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13625 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13626 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013627 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013628 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013629 break;
13630 case CAM_SENSOR_HDR_STAGGERED:
13631 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13632 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13633 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013634 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013635 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013636 break;
13637 case CAM_SENSOR_HDR_OFF:
13638 isSupported = true;
13639 LOGD("Turning off sensor HDR");
13640 break;
13641 default:
13642 LOGE("HDR mode %d not supported", sensor_hdr);
13643 rc = BAD_VALUE;
13644 break;
13645 }
13646
13647 if(isSupported) {
13648 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13649 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13650 rc = BAD_VALUE;
13651 } else {
13652 if(!isVideoHdrEnable)
13653 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013654 }
13655 }
13656 return rc;
13657}
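// Illustrative only: when sensor HDR is enabled, the concrete HDR type is taken from
// the persist.camera.sensor.hdr property and validated against the camera capability
// mask above, e.g. (assuming the cam_sensor_hdr_type_t values used by this build,
// where 3 corresponds to staggered HDR as noted in the _LE_CAMERA_ default):
//   adb shell setprop persist.camera.sensor.hdr 3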
13658
13659/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013660 * FUNCTION : needRotationReprocess
13661 *
13662 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13663 *
13664 * PARAMETERS : none
13665 *
13666 * RETURN : true: needed
13667 * false: no need
13668 *==========================================================================*/
13669bool QCamera3HardwareInterface::needRotationReprocess()
13670{
13671 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13672 // current rotation is not zero, and pp has the capability to process rotation
13673 LOGH("need do reprocess for rotation");
13674 return true;
13675 }
13676
13677 return false;
13678}
13679
13680/*===========================================================================
13681 * FUNCTION : needReprocess
13682 *
13683 * DESCRIPTION: if reprocess is needed
13684 *
13685 * PARAMETERS : none
13686 *
13687 * RETURN : true: needed
13688 * false: no need
13689 *==========================================================================*/
13690bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13691{
13692 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13693 // TODO: add for ZSL HDR later
13694 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13695 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13696 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13697 return true;
13698 } else {
13699 LOGH("already post processed frame");
13700 return false;
13701 }
13702 }
13703 return needRotationReprocess();
13704}
13705
13706/*===========================================================================
13707 * FUNCTION : needJpegExifRotation
13708 *
13709 * DESCRIPTION: if rotation from jpeg is needed
13710 *
13711 * PARAMETERS : none
13712 *
13713 * RETURN : true: needed
13714 * false: no need
13715 *==========================================================================*/
13716bool QCamera3HardwareInterface::needJpegExifRotation()
13717{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013718 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013719 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13720 LOGD("Need use Jpeg EXIF Rotation");
13721 return true;
13722 }
13723 return false;
13724}
13725
13726/*===========================================================================
13727 * FUNCTION : addOfflineReprocChannel
13728 *
13729 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13730 * coming from input channel
13731 *
13732 * PARAMETERS :
13733 * @config : reprocess configuration
13734 * @inputChHandle : pointer to the input (source) channel
13735 *
13736 *
13737 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13738 *==========================================================================*/
13739QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13740 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13741{
13742 int32_t rc = NO_ERROR;
13743 QCamera3ReprocessChannel *pChannel = NULL;
13744
13745 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013746 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13747 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013748 if (NULL == pChannel) {
13749 LOGE("no mem for reprocess channel");
13750 return NULL;
13751 }
13752
13753 rc = pChannel->initialize(IS_TYPE_NONE);
13754 if (rc != NO_ERROR) {
13755 LOGE("init reprocess channel failed, ret = %d", rc);
13756 delete pChannel;
13757 return NULL;
13758 }
13759
13760 // pp feature config
13761 cam_pp_feature_config_t pp_config;
13762 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13763
13764 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13765 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13766 & CAM_QCOM_FEATURE_DSDN) {
13767 //Use CPP CDS incase h/w supports it.
13768 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13769 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13770 }
13771 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13772 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13773 }
13774
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013775 if (config.hdr_param.hdr_enable) {
13776 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13777 pp_config.hdr_param = config.hdr_param;
13778 }
13779
13780 if (mForceHdrSnapshot) {
13781 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13782 pp_config.hdr_param.hdr_enable = 1;
13783 pp_config.hdr_param.hdr_need_1x = 0;
13784 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13785 }
13786
Thierry Strudel3d639192016-09-09 11:52:26 -070013787 rc = pChannel->addReprocStreamsFromSource(pp_config,
13788 config,
13789 IS_TYPE_NONE,
13790 mMetadataChannel);
13791
13792 if (rc != NO_ERROR) {
13793 delete pChannel;
13794 return NULL;
13795 }
13796 return pChannel;
13797}
13798
13799/*===========================================================================
13800 * FUNCTION : getMobicatMask
13801 *
13802 * DESCRIPTION: returns mobicat mask
13803 *
13804 * PARAMETERS : none
13805 *
13806 * RETURN : mobicat mask
13807 *
13808 *==========================================================================*/
13809uint8_t QCamera3HardwareInterface::getMobicatMask()
13810{
13811 return m_MobicatMask;
13812}
13813
13814/*===========================================================================
13815 * FUNCTION : setMobicat
13816 *
13817 * DESCRIPTION: set Mobicat on/off.
13818 *
13819 * PARAMETERS :
13820 * @params : none
13821 *
13822 * RETURN : int32_t type of status
13823 * NO_ERROR -- success
13824 * non-zero failure code
13825 *==========================================================================*/
13826int32_t QCamera3HardwareInterface::setMobicat()
13827{
Thierry Strudel3d639192016-09-09 11:52:26 -070013828 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013829
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013830 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013831 tune_cmd_t tune_cmd;
13832 tune_cmd.type = SET_RELOAD_CHROMATIX;
13833 tune_cmd.module = MODULE_ALL;
13834 tune_cmd.value = TRUE;
13835 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13836 CAM_INTF_PARM_SET_VFE_COMMAND,
13837 tune_cmd);
13838
13839 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13840 CAM_INTF_PARM_SET_PP_COMMAND,
13841 tune_cmd);
13842 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013843
13844 return ret;
13845}
13846
13847/*===========================================================================
13848* FUNCTION : getLogLevel
13849*
13850* DESCRIPTION: Reads the log level property into a variable
13851*
13852* PARAMETERS :
13853* None
13854*
13855* RETURN :
13856* None
13857*==========================================================================*/
13858void QCamera3HardwareInterface::getLogLevel()
13859{
13860 char prop[PROPERTY_VALUE_MAX];
13861 uint32_t globalLogLevel = 0;
13862
13863 property_get("persist.camera.hal.debug", prop, "0");
13864 int val = atoi(prop);
13865 if (0 <= val) {
13866 gCamHal3LogLevel = (uint32_t)val;
13867 }
13868
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013869 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013870 gKpiDebugLevel = atoi(prop);
13871
13872 property_get("persist.camera.global.debug", prop, "0");
13873 val = atoi(prop);
13874 if (0 <= val) {
13875 globalLogLevel = (uint32_t)val;
13876 }
13877
13878 /* Highest log level among hal.logs and global.logs is selected */
13879 if (gCamHal3LogLevel < globalLogLevel)
13880 gCamHal3LogLevel = globalLogLevel;
13881
13882 return;
13883}
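// Illustrative only: gCamHal3LogLevel is re-read from the properties above whenever
// dump() runs, so verbosity can be raised at runtime without restarting the media
// server, e.g.:
//   adb shell setprop persist.camera.hal.debug 4
//   adb shell setprop persist.camera.global.debug 4
//   adb shell dumpsys media.camera        # triggers dump() -> getLogLevel()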
13884
13885/*===========================================================================
13886 * FUNCTION : validateStreamRotations
13887 *
13888 * DESCRIPTION: Check if the rotations requested are supported
13889 *
13890 * PARAMETERS :
13891 * @stream_list : streams to be configured
13892 *
13893 * RETURN : NO_ERROR on success
13894 * -EINVAL on failure
13895 *
13896 *==========================================================================*/
13897int QCamera3HardwareInterface::validateStreamRotations(
13898 camera3_stream_configuration_t *streamList)
13899{
13900 int rc = NO_ERROR;
13901
13902 /*
13903 * Loop through all streams requested in configuration
13904 * Check if unsupported rotations have been requested on any of them
13905 */
13906 for (size_t j = 0; j < streamList->num_streams; j++){
13907 camera3_stream_t *newStream = streamList->streams[j];
13908
Emilian Peev35ceeed2017-06-29 11:58:56 -070013909 switch(newStream->rotation) {
13910 case CAMERA3_STREAM_ROTATION_0:
13911 case CAMERA3_STREAM_ROTATION_90:
13912 case CAMERA3_STREAM_ROTATION_180:
13913 case CAMERA3_STREAM_ROTATION_270:
13914 //Expected values
13915 break;
13916 default:
13917 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13918 "type:%d and stream format:%d", __func__,
13919 newStream->rotation, newStream->stream_type,
13920 newStream->format);
13921 return -EINVAL;
13922 }
13923
Thierry Strudel3d639192016-09-09 11:52:26 -070013924 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13925 bool isImplDef = (newStream->format ==
13926 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13927 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13928 isImplDef);
13929
13930 if (isRotated && (!isImplDef || isZsl)) {
13931 LOGE("Error: Unsupported rotation of %d requested for stream"
13932 "type:%d and stream format:%d",
13933 newStream->rotation, newStream->stream_type,
13934 newStream->format);
13935 rc = -EINVAL;
13936 break;
13937 }
13938 }
13939
13940 return rc;
13941}
13942
13943/*===========================================================================
13944* FUNCTION : getFlashInfo
13945*
13946* DESCRIPTION: Retrieve information about whether the device has a flash.
13947*
13948* PARAMETERS :
13949* @cameraId : Camera id to query
13950* @hasFlash : Boolean indicating whether there is a flash device
13951* associated with given camera
13952* @flashNode : If a flash device exists, this will be its device node.
13953*
13954* RETURN :
13955* None
13956*==========================================================================*/
13957void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13958 bool& hasFlash,
13959 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13960{
13961 cam_capability_t* camCapability = gCamCapability[cameraId];
13962 if (NULL == camCapability) {
13963 hasFlash = false;
13964 flashNode[0] = '\0';
13965 } else {
13966 hasFlash = camCapability->flash_available;
13967 strlcpy(flashNode,
13968 (char*)camCapability->flash_dev_name,
13969 QCAMERA_MAX_FILEPATH_LENGTH);
13970 }
13971}
13972
13973/*===========================================================================
13974* FUNCTION : getEepromVersionInfo
13975*
13976* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13977*
13978* PARAMETERS : None
13979*
13980* RETURN : string describing EEPROM version
13981* "\0" if no such info available
13982*==========================================================================*/
13983const char *QCamera3HardwareInterface::getEepromVersionInfo()
13984{
13985 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13986}
13987
13988/*===========================================================================
13989* FUNCTION : getLdafCalib
13990*
13991* DESCRIPTION: Retrieve Laser AF calibration data
13992*
13993* PARAMETERS : None
13994*
13995* RETURN : Two uint32_t describing laser AF calibration data
13996* NULL if none is available.
13997*==========================================================================*/
13998const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13999{
14000 if (mLdafCalibExist) {
14001 return &mLdafCalib[0];
14002 } else {
14003 return NULL;
14004 }
14005}
14006
14007/*===========================================================================
14008 * FUNCTION : dynamicUpdateMetaStreamInfo
14009 *
14010 * DESCRIPTION: This function:
14011 * (1) stops all the channels
14012 * (2) returns error on pending requests and buffers
14013 * (3) sends metastream_info in setparams
14014 * (4) starts all channels
14015 * This is useful when sensor has to be restarted to apply any
14016 * settings such as frame rate from a different sensor mode
14017 *
14018 * PARAMETERS : None
14019 *
14020 * RETURN : NO_ERROR on success
14021 * Error codes on failure
14022 *
14023 *==========================================================================*/
14024int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14025{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014026 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014027 int rc = NO_ERROR;
14028
14029 LOGD("E");
14030
14031 rc = stopAllChannels();
14032 if (rc < 0) {
14033 LOGE("stopAllChannels failed");
14034 return rc;
14035 }
14036
14037 rc = notifyErrorForPendingRequests();
14038 if (rc < 0) {
14039 LOGE("notifyErrorForPendingRequests failed");
14040 return rc;
14041 }
14042
14043 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14044 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14045 "Format:%d",
14046 mStreamConfigInfo.type[i],
14047 mStreamConfigInfo.stream_sizes[i].width,
14048 mStreamConfigInfo.stream_sizes[i].height,
14049 mStreamConfigInfo.postprocess_mask[i],
14050 mStreamConfigInfo.format[i]);
14051 }
14052
14053 /* Send meta stream info once again so that ISP can start */
14054 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14055 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14056 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14057 mParameters);
14058 if (rc < 0) {
14059 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14060 }
14061
14062 rc = startAllChannels();
14063 if (rc < 0) {
14064 LOGE("startAllChannels failed");
14065 return rc;
14066 }
14067
14068 LOGD("X");
14069 return rc;
14070}
14071
14072/*===========================================================================
14073 * FUNCTION : stopAllChannels
14074 *
14075 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14076 *
14077 * PARAMETERS : None
14078 *
14079 * RETURN : NO_ERROR on success
14080 * Error codes on failure
14081 *
14082 *==========================================================================*/
14083int32_t QCamera3HardwareInterface::stopAllChannels()
14084{
14085 int32_t rc = NO_ERROR;
14086
14087 LOGD("Stopping all channels");
14088 // Stop the Streams/Channels
14089 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14090 it != mStreamInfo.end(); it++) {
14091 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14092 if (channel) {
14093 channel->stop();
14094 }
14095 (*it)->status = INVALID;
14096 }
14097
14098 if (mSupportChannel) {
14099 mSupportChannel->stop();
14100 }
14101 if (mAnalysisChannel) {
14102 mAnalysisChannel->stop();
14103 }
14104 if (mRawDumpChannel) {
14105 mRawDumpChannel->stop();
14106 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014107 if (mHdrPlusRawSrcChannel) {
14108 mHdrPlusRawSrcChannel->stop();
14109 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014110 if (mMetadataChannel) {
14111 /* If content of mStreamInfo is not 0, there is metadata stream */
14112 mMetadataChannel->stop();
14113 }
14114
14115 LOGD("All channels stopped");
14116 return rc;
14117}
14118
14119/*===========================================================================
14120 * FUNCTION : startAllChannels
14121 *
14122 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14123 *
14124 * PARAMETERS : None
14125 *
14126 * RETURN : NO_ERROR on success
14127 * Error codes on failure
14128 *
14129 *==========================================================================*/
14130int32_t QCamera3HardwareInterface::startAllChannels()
14131{
14132 int32_t rc = NO_ERROR;
14133
14134 LOGD("Start all channels ");
14135 // Start the Streams/Channels
14136 if (mMetadataChannel) {
14137 /* If content of mStreamInfo is not 0, there is metadata stream */
14138 rc = mMetadataChannel->start();
14139 if (rc < 0) {
14140 LOGE("META channel start failed");
14141 return rc;
14142 }
14143 }
14144 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14145 it != mStreamInfo.end(); it++) {
14146 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14147 if (channel) {
14148 rc = channel->start();
14149 if (rc < 0) {
14150 LOGE("channel start failed");
14151 return rc;
14152 }
14153 }
14154 }
14155 if (mAnalysisChannel) {
14156 mAnalysisChannel->start();
14157 }
14158 if (mSupportChannel) {
14159 rc = mSupportChannel->start();
14160 if (rc < 0) {
14161 LOGE("Support channel start failed");
14162 return rc;
14163 }
14164 }
14165 if (mRawDumpChannel) {
14166 rc = mRawDumpChannel->start();
14167 if (rc < 0) {
14168 LOGE("RAW dump channel start failed");
14169 return rc;
14170 }
14171 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014172 if (mHdrPlusRawSrcChannel) {
14173 rc = mHdrPlusRawSrcChannel->start();
14174 if (rc < 0) {
14175 LOGE("HDR+ RAW channel start failed");
14176 return rc;
14177 }
14178 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014179
14180 LOGD("All channels started");
14181 return rc;
14182}
14183
14184/*===========================================================================
14185 * FUNCTION : notifyErrorForPendingRequests
14186 *
14187 * DESCRIPTION: This function sends errors for all the pending requests/buffers
14188 *
14189 * PARAMETERS : None
14190 *
14191 * RETURN : Error codes
14192 * NO_ERROR on success
14193 *
14194 *==========================================================================*/
14195int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14196{
Emilian Peev7650c122017-01-19 08:24:33 -080014197 notifyErrorFoPendingDepthData(mDepthChannel);
14198
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014199 auto pendingRequest = mPendingRequestsList.begin();
14200 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014201
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014202 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14203 // buffers (for which buffers aren't sent yet).
14204 while (pendingRequest != mPendingRequestsList.end() ||
14205 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14206 if (pendingRequest == mPendingRequestsList.end() ||
14207 pendingBuffer->frame_number < pendingRequest->frame_number) {
14208 // If metadata for this frame was sent, notify about a buffer error and returns buffers
14209 // with error.
14210 for (auto &info : pendingBuffer->mPendingBufferList) {
14211 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014212 camera3_notify_msg_t notify_msg;
14213 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14214 notify_msg.type = CAMERA3_MSG_ERROR;
14215 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014216 notify_msg.message.error.error_stream = info.stream;
14217 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014218 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014219
14220 camera3_stream_buffer_t buffer = {};
14221 buffer.acquire_fence = -1;
14222 buffer.release_fence = -1;
14223 buffer.buffer = info.buffer;
14224 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14225 buffer.stream = info.stream;
14226 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014227 }
14228
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014229 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14230 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14231 pendingBuffer->frame_number > pendingRequest->frame_number) {
14232 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014233 camera3_notify_msg_t notify_msg;
14234 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14235 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014236 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14237 notify_msg.message.error.error_stream = nullptr;
14238 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014239 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014240
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014241 if (pendingRequest->input_buffer != nullptr) {
14242 camera3_capture_result result = {};
14243 result.frame_number = pendingRequest->frame_number;
14244 result.result = nullptr;
14245 result.input_buffer = pendingRequest->input_buffer;
14246 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014247 }
14248
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014249 mShutterDispatcher.clear(pendingRequest->frame_number);
14250 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14251 } else {
14252 // If both buffers and result metadata weren't sent yet, notify about a request error
14253 // and return buffers with error.
14254 for (auto &info : pendingBuffer->mPendingBufferList) {
14255 camera3_notify_msg_t notify_msg;
14256 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14257 notify_msg.type = CAMERA3_MSG_ERROR;
14258 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14259 notify_msg.message.error.error_stream = info.stream;
14260 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14261 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014262
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014263 camera3_stream_buffer_t buffer = {};
14264 buffer.acquire_fence = -1;
14265 buffer.release_fence = -1;
14266 buffer.buffer = info.buffer;
14267 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14268 buffer.stream = info.stream;
14269 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14270 }
14271
14272 if (pendingRequest->input_buffer != nullptr) {
14273 camera3_capture_result result = {};
14274 result.frame_number = pendingRequest->frame_number;
14275 result.result = nullptr;
14276 result.input_buffer = pendingRequest->input_buffer;
14277 orchestrateResult(&result);
14278 }
14279
14280 mShutterDispatcher.clear(pendingRequest->frame_number);
14281 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14282 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014283 }
14284 }
14285
14286 /* Reset pending frame Drop list and requests list */
14287 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014288 mShutterDispatcher.clear();
14289 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014290 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014291 mExpectedFrameDuration = 0;
14292 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014293 LOGH("Cleared all the pending buffers ");
14294
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014295 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014296}
14297
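/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Check whether a stream with the given dimensions has to be
 *              routed through the encoder path, i.e. it exceeds the maximum
 *              viewfinder size or 4K video in either dimension.
 *
 * PARAMETERS :
 *   @max_viewfinder_size : maximum supported viewfinder dimensions
 *   @width               : stream width to check
 *   @height              : stream height to check
 *
 * RETURN     : true if the stream belongs on the encoder path
 *              false otherwise
 *==========================================================================*/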
14298bool QCamera3HardwareInterface::isOnEncoder(
14299 const cam_dimension_t max_viewfinder_size,
14300 uint32_t width, uint32_t height)
14301{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014302 return ((width > (uint32_t)max_viewfinder_size.width) ||
14303 (height > (uint32_t)max_viewfinder_size.height) ||
14304 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14305 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014306}
14307
14308/*===========================================================================
14309 * FUNCTION : setBundleInfo
14310 *
14311 * DESCRIPTION: Set bundle info for all bundled streams.
14312 *
14313 * PARAMETERS : None
14314 *
14315 * RETURN : NO_ERROR on success
14316 * Error codes on failure
14317 *==========================================================================*/
14318int32_t QCamera3HardwareInterface::setBundleInfo()
14319{
14320 int32_t rc = NO_ERROR;
14321
14322 if (mChannelHandle) {
14323 cam_bundle_config_t bundleInfo;
14324 memset(&bundleInfo, 0, sizeof(bundleInfo));
14325 rc = mCameraHandle->ops->get_bundle_info(
14326 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14327 if (rc != NO_ERROR) {
14328 LOGE("get_bundle_info failed");
14329 return rc;
14330 }
14331 if (mAnalysisChannel) {
14332 mAnalysisChannel->setBundleInfo(bundleInfo);
14333 }
14334 if (mSupportChannel) {
14335 mSupportChannel->setBundleInfo(bundleInfo);
14336 }
14337 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14338 it != mStreamInfo.end(); it++) {
14339 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14340 channel->setBundleInfo(bundleInfo);
14341 }
14342 if (mRawDumpChannel) {
14343 mRawDumpChannel->setBundleInfo(bundleInfo);
14344 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014345 if (mHdrPlusRawSrcChannel) {
14346 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14347 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014348 }
14349
14350 return rc;
14351}
14352
14353/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014354 * FUNCTION : setInstantAEC
14355 *
14356 * DESCRIPTION: Set Instant AEC related params.
14357 *
14358 * PARAMETERS :
14359 * @meta: CameraMetadata reference
14360 *
14361 * RETURN : NO_ERROR on success
14362 * Error codes on failure
14363 *==========================================================================*/
14364int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14365{
14366 int32_t rc = NO_ERROR;
14367 uint8_t val = 0;
14368 char prop[PROPERTY_VALUE_MAX];
14369
14370 // First try to configure instant AEC from framework metadata
14371 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14372 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14373 }
14374
14375 // If framework did not set this value, try to read from set prop.
14376 if (val == 0) {
14377 memset(prop, 0, sizeof(prop));
14378 property_get("persist.camera.instant.aec", prop, "0");
14379 val = (uint8_t)atoi(prop);
14380 }
14381
14382 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14383 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14384 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14385 mInstantAEC = val;
14386 mInstantAECSettledFrameNumber = 0;
14387 mInstantAecFrameIdxCount = 0;
14388 LOGH("instantAEC value set %d",val);
14389 if (mInstantAEC) {
14390 memset(prop, 0, sizeof(prop));
14391 property_get("persist.camera.ae.instant.bound", prop, "10");
14392 int32_t aec_frame_skip_cnt = atoi(prop);
14393 if (aec_frame_skip_cnt >= 0) {
14394 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14395 } else {
14396 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14397 rc = BAD_VALUE;
14398 }
14399 }
14400 } else {
14401 LOGE("Bad instant aec value set %d", val);
14402 rc = BAD_VALUE;
14403 }
14404 return rc;
14405}
14406
14407/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014408 * FUNCTION : get_num_overall_buffers
14409 *
14410 * DESCRIPTION: Get the total number of pending buffers across all requests.
14411 *
14412 * PARAMETERS : None
14413 *
14414 * RETURN : Number of overall pending buffers
14415 *
14416 *==========================================================================*/
14417uint32_t PendingBuffersMap::get_num_overall_buffers()
14418{
14419 uint32_t sum_buffers = 0;
14420 for (auto &req : mPendingBuffersInRequest) {
14421 sum_buffers += req.mPendingBufferList.size();
14422 }
14423 return sum_buffers;
14424}
14425
14426/*===========================================================================
14427 * FUNCTION : removeBuf
14428 *
14429 * DESCRIPTION: Remove a matching buffer from tracker.
14430 *
14431 * PARAMETERS : @buffer: image buffer for the callback
14432 *
14433 * RETURN : None
14434 *
14435 *==========================================================================*/
14436void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14437{
14438 bool buffer_found = false;
14439 for (auto req = mPendingBuffersInRequest.begin();
14440 req != mPendingBuffersInRequest.end(); req++) {
14441 for (auto k = req->mPendingBufferList.begin();
14442 k != req->mPendingBufferList.end(); k++ ) {
14443 if (k->buffer == buffer) {
14444 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14445 req->frame_number, buffer);
14446 k = req->mPendingBufferList.erase(k);
14447 if (req->mPendingBufferList.empty()) {
14448 // Remove this request from Map
14449 req = mPendingBuffersInRequest.erase(req);
14450 }
14451 buffer_found = true;
14452 break;
14453 }
14454 }
14455 if (buffer_found) {
14456 break;
14457 }
14458 }
14459 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14460 get_num_overall_buffers());
14461}
14462
14463/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014464 * FUNCTION : getBufErrStatus
14465 *
14466 * DESCRIPTION: get buffer error status
14467 *
14468 * PARAMETERS : @buffer: buffer handle
14469 *
14470 * RETURN : Error status
14471 *
14472 *==========================================================================*/
14473int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14474{
14475 for (auto& req : mPendingBuffersInRequest) {
14476 for (auto& k : req.mPendingBufferList) {
14477 if (k.buffer == buffer)
14478 return k.bufStatus;
14479 }
14480 }
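    // The buffer is not tracked in any pending request; report it as OK.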
14481 return CAMERA3_BUFFER_STATUS_OK;
14482}
14483
14484/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014485 * FUNCTION : setPAAFSupport
14486 *
14487 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14488 * feature mask according to stream type and filter
14489 * arrangement
14490 *
14491 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14492 * @stream_type: stream type
14493 * @filter_arrangement: filter arrangement
14494 *
14495 * RETURN : None
14496 *==========================================================================*/
14497void QCamera3HardwareInterface::setPAAFSupport(
14498 cam_feature_mask_t& feature_mask,
14499 cam_stream_type_t stream_type,
14500 cam_color_filter_arrangement_t filter_arrangement)
14501{
Thierry Strudel3d639192016-09-09 11:52:26 -070014502 switch (filter_arrangement) {
14503 case CAM_FILTER_ARRANGEMENT_RGGB:
14504 case CAM_FILTER_ARRANGEMENT_GRBG:
14505 case CAM_FILTER_ARRANGEMENT_GBRG:
14506 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014507 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14508 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014509 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
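            // Skip PAAF when the QTI PPEISCORE (EIS post-processing) feature is already set
            // for this stream.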
Thierry Strudel2896d122017-02-23 19:18:03 -080014510 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14511 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014512 }
14513 break;
14514 case CAM_FILTER_ARRANGEMENT_Y:
14515 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14516 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14517 }
14518 break;
14519 default:
14520 break;
14521 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014522 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14523 feature_mask, stream_type, filter_arrangement);
14524
14525
Thierry Strudel3d639192016-09-09 11:52:26 -070014526}
14527
14528/*===========================================================================
14529* FUNCTION : getSensorMountAngle
14530*
14531* DESCRIPTION: Retrieve sensor mount angle
14532*
14533* PARAMETERS : None
14534*
14535* RETURN : sensor mount angle in uint32_t
14536*==========================================================================*/
14537uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14538{
14539 return gCamCapability[mCameraId]->sensor_mount_angle;
14540}
14541
14542/*===========================================================================
14543* FUNCTION : getRelatedCalibrationData
14544*
14545* DESCRIPTION: Retrieve related system calibration data
14546*
14547* PARAMETERS : None
14548*
14549* RETURN : Pointer of related system calibration data
14550*==========================================================================*/
14551const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14552{
14553 return (const cam_related_system_calibration_data_t *)
14554 &(gCamCapability[mCameraId]->related_cam_calibration);
14555}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014556
14557/*===========================================================================
14558 * FUNCTION : is60HzZone
14559 *
14560 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14561 *
14562 * PARAMETERS : None
14563 *
14564 * RETURN : True if in 60Hz zone, False otherwise
14565 *==========================================================================*/
14566bool QCamera3HardwareInterface::is60HzZone()
14567{
14568 time_t t = time(NULL);
14569 struct tm lt;
14570
14571 struct tm* r = localtime_r(&t, &lt);
14572
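    // Rough heuristic based on the local UTC offset: offsets at or below UTC-2 or at or
    // above UTC+8 (roughly the Americas and East Asia/Oceania) are treated as 60Hz regions;
    // everything in between defaults to 50Hz. If local time is unavailable, assume 60Hz.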
14573 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14574 return true;
14575 else
14576 return false;
14577}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014578
14579/*===========================================================================
14580 * FUNCTION : adjustBlackLevelForCFA
14581 *
14582 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14583 * of bayer CFA (Color Filter Array).
14584 *
14585 * PARAMETERS : @input: black level pattern in the order of RGGB
14586 * @output: black level pattern in the order of CFA
14587 * @color_arrangement: CFA color arrangement
14588 *
14589 * RETURN : None
14590 *==========================================================================*/
14591template<typename T>
14592void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14593 T input[BLACK_LEVEL_PATTERN_CNT],
14594 T output[BLACK_LEVEL_PATTERN_CNT],
14595 cam_color_filter_arrangement_t color_arrangement)
14596{
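    // The input pattern is in canonical R, Gr, Gb, B order; remap it so that output[i]
    // corresponds to the i-th channel of the sensor's CFA readout (e.g. GRBG expects
    // Gr, R, B, Gb).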
14597 switch (color_arrangement) {
14598 case CAM_FILTER_ARRANGEMENT_GRBG:
14599 output[0] = input[1];
14600 output[1] = input[0];
14601 output[2] = input[3];
14602 output[3] = input[2];
14603 break;
14604 case CAM_FILTER_ARRANGEMENT_GBRG:
14605 output[0] = input[2];
14606 output[1] = input[3];
14607 output[2] = input[0];
14608 output[3] = input[1];
14609 break;
14610 case CAM_FILTER_ARRANGEMENT_BGGR:
14611 output[0] = input[3];
14612 output[1] = input[2];
14613 output[2] = input[1];
14614 output[3] = input[0];
14615 break;
14616 case CAM_FILTER_ARRANGEMENT_RGGB:
14617 output[0] = input[0];
14618 output[1] = input[1];
14619 output[2] = input[2];
14620 output[3] = input[3];
14621 break;
14622 default:
14623 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14624 break;
14625 }
14626}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014627
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014628void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14629 CameraMetadata &resultMetadata,
14630 std::shared_ptr<metadata_buffer_t> settings)
14631{
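    // Copy the JPEG-related tags and the capture intent from the original HDR+ request
    // settings into the result metadata, since the result metadata comes from a ZSL buffer.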
14632 if (settings == nullptr) {
14633 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14634 return;
14635 }
14636
14637 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14638 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14639 }
14640
14641 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14642 String8 str((const char *)gps_methods);
14643 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14644 }
14645
14646 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14647 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14648 }
14649
14650 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14651 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14652 }
14653
14654 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14655 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14656 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14657 }
14658
14659 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14660 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14661 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14662 }
14663
14664 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14665 int32_t fwk_thumb_size[2];
14666 fwk_thumb_size[0] = thumb_size->width;
14667 fwk_thumb_size[1] = thumb_size->height;
14668 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14669 }
14670
14671 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14672 uint8_t fwk_intent = intent[0];
14673 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14674 }
14675}
14676
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014677bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14678 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
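    // HDR+ is only attempted for ZSL still captures: high-quality noise reduction, edge and
    // aberration correction modes, AE/AWB on auto, no color effect, flash off, no crop,
    // high-quality tonemap, and a single JPEG output buffer.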
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014679 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14680 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14681 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014682 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014683 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014684 return false;
14685 }
14686
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014687 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014688 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14689 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014690 return false;
14691 }
14692
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014693 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14694 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14695 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14696 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14697 return false;
14698 }
14699
14700 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14701 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14702 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14703 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14704 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14705 return false;
14706 }
14707
14708 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14709 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14710 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14711 return false;
14712 }
14713
14714 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14715 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14716 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14717 ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
14718 return false;
14719 }
14720
14721 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14722 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14723 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14724 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14725 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14726 return false;
14727 }
14728
14729 // TODO (b/32585046): support non-ZSL.
14730 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14731 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14732 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14733 return false;
14734 }
14735
14736 // TODO (b/32586081): support flash.
14737 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14738 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14739 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14740 return false;
14741 }
14742
14743 // TODO (b/36492953): support digital zoom.
14744 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14745 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14746 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14747 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14748 gCamCapability[mCameraId]->active_array_size.width ||
14749 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14750 gCamCapability[mCameraId]->active_array_size.height) {
14751 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14752 __FUNCTION__);
14753 return false;
14754 }
14755
14756 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14757 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14758 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14759 return false;
14760 }
14761
14762 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014763 if (request.num_output_buffers != 1 ||
14764 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014765 ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014766 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014767 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014768                    request.output_buffers[i].stream->width,
14769                    request.output_buffers[i].stream->height,
14770                    request.output_buffers[i].stream->format);
14771 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014772 return false;
14773 }
14774
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014775 return true;
14776}
14777
14778bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14779 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14780 const CameraMetadata &metadata)
14781{
14782 if (hdrPlusRequest == nullptr) return false;
14783 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14784
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014785 // Get a YUV buffer from pic channel.
14786 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14787 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14788 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14789 if (res != OK) {
14790 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14791 __FUNCTION__, strerror(-res), res);
14792 return false;
14793 }
14794
14795 pbcamera::StreamBuffer buffer;
14796 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014797 buffer.dmaBufFd = yuvBuffer->fd;
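        // Pass a CPU pointer only when the buffer has no dma-buf fd to share.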
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014798 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014799 buffer.dataSize = yuvBuffer->frame_len;
14800
14801 pbcamera::CaptureRequest pbRequest;
14802 pbRequest.id = request.frame_number;
14803 pbRequest.outputBuffers.push_back(buffer);
14804
14805 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014806 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014807 if (res != OK) {
14808 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14809 strerror(-res), res);
14810 return false;
14811 }
14812
14813 hdrPlusRequest->yuvBuffer = yuvBuffer;
14814 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14815
14816 return true;
14817}
14818
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014819status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14820{
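    // Nothing to do if the client is already open or an open is already in flight.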
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014821 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14822 return OK;
14823 }
14824
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014825 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014826 if (res != OK) {
14827 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14828 strerror(-res), res);
14829 return res;
14830 }
14831 gHdrPlusClientOpening = true;
14832
14833 return OK;
14834}
14835
Chien-Yu Chenee335912017-02-09 17:53:20 -080014836status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14837{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014838 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014839
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014840 if (mHdrPlusModeEnabled) {
14841 return OK;
14842 }
14843
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014844 // Check if gHdrPlusClient is opened or being opened.
14845 if (gHdrPlusClient == nullptr) {
14846 if (gHdrPlusClientOpening) {
14847 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14848 return OK;
14849 }
14850
14851 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014852 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014853 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14854 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014855 return res;
14856 }
14857
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014858 // When opening HDR+ client completes, HDR+ mode will be enabled.
14859 return OK;
14860
Chien-Yu Chenee335912017-02-09 17:53:20 -080014861 }
14862
14863    // Configure streams for HDR+.
14864 res = configureHdrPlusStreamsLocked();
14865 if (res != OK) {
14866 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014867 return res;
14868 }
14869
14870 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14871 res = gHdrPlusClient->setZslHdrPlusMode(true);
14872 if (res != OK) {
14873 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014874 return res;
14875 }
14876
14877 mHdrPlusModeEnabled = true;
14878 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14879
14880 return OK;
14881}
14882
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014883void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14884{
14885 if (gHdrPlusClientOpening) {
14886 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14887 }
14888}
14889
Chien-Yu Chenee335912017-02-09 17:53:20 -080014890void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14891{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014892 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014893 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014894 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14895 if (res != OK) {
14896 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14897 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014898
14899 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014900 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014901 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014902 }
14903
14904 mHdrPlusModeEnabled = false;
14905 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14906}
14907
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014908bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14909{
14910 // Check if mPictureChannel is valid.
14911 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14912 if (mPictureChannel == nullptr) {
14913 return false;
14914 }
14915
14916 return true;
14917}
14918
Chien-Yu Chenee335912017-02-09 17:53:20 -080014919status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014920{
14921 pbcamera::InputConfiguration inputConfig;
14922 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14923 status_t res = OK;
14924
14925 // Configure HDR+ client streams.
14926 // Get input config.
14927 if (mHdrPlusRawSrcChannel) {
14928 // HDR+ input buffers will be provided by HAL.
14929 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14930 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14931 if (res != OK) {
14932 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14933 __FUNCTION__, strerror(-res), res);
14934 return res;
14935 }
14936
14937 inputConfig.isSensorInput = false;
14938 } else {
14939 // Sensor MIPI will send data to Easel.
14940 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014941 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014942 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14943 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14944 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14945 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14946 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014947 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014948 if (mSensorModeInfo.num_raw_bits != 10) {
14949 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14950 mSensorModeInfo.num_raw_bits);
14951 return BAD_VALUE;
14952 }
14953
14954 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014955 }
14956
14957 // Get output configurations.
14958 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014959 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014960
14961 // Easel may need to output YUV output buffers if mPictureChannel was created.
14962 pbcamera::StreamConfiguration yuvOutputConfig;
14963 if (mPictureChannel != nullptr) {
14964 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14965 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14966 if (res != OK) {
14967 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14968 __FUNCTION__, strerror(-res), res);
14969
14970 return res;
14971 }
14972
14973 outputStreamConfigs.push_back(yuvOutputConfig);
14974 }
14975
14976 // TODO: consider other channels for YUV output buffers.
14977
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014978 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014979 if (res != OK) {
14980 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14981 strerror(-res), res);
14982 return res;
14983 }
14984
14985 return OK;
14986}
14987
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070014988void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
14989{
14990 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
14991 // Set HAL state to error.
14992 pthread_mutex_lock(&mMutex);
14993 mState = ERROR;
14994 pthread_mutex_unlock(&mMutex);
14995
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070014996 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070014997}
14998
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014999void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15000{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015001 if (client == nullptr) {
15002 ALOGE("%s: Opened client is null.", __FUNCTION__);
15003 return;
15004 }
15005
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015006 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015007 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15008
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015009 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015010 if (!gHdrPlusClientOpening) {
15011        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
15012 return;
15013 }
15014
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015015 gHdrPlusClient = std::move(client);
15016 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015017 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015018
15019 // Set static metadata.
15020 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15021 if (res != OK) {
15022 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15023 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015024 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015025 gHdrPlusClient = nullptr;
15026 return;
15027 }
15028
15029 // Enable HDR+ mode.
15030 res = enableHdrPlusModeLocked();
15031 if (res != OK) {
15032 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15033 }
15034}
15035
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015036void QCamera3HardwareInterface::onOpenFailed(status_t err)
15037{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015038 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015039 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015040 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015041 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015042}
15043
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015044void QCamera3HardwareInterface::onFatalError()
15045{
15046 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15047
15048 // Set HAL state to error.
15049 pthread_mutex_lock(&mMutex);
15050 mState = ERROR;
15051 pthread_mutex_unlock(&mMutex);
15052
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015053 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015054}
15055
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015056void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15057{
15058 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15059 __LINE__, requestId, apSensorTimestampNs);
15060
15061 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15062}
15063
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015064void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015065 const camera_metadata_t &resultMetadata)
15066{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015067 if (result != nullptr) {
15068 if (result->outputBuffers.size() != 1) {
15069 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
15070 result->outputBuffers.size());
15071 return;
15072 }
15073
15074 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15075 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15076 result->outputBuffers[0].streamId);
15077 return;
15078 }
15079
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015080 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015081 HdrPlusPendingRequest pendingRequest;
15082 {
15083 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15084 auto req = mHdrPlusPendingRequests.find(result->requestId);
15085 pendingRequest = req->second;
15086 }
15087
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015088 // Update the result metadata with the settings of the HDR+ still capture request because
15089 // the result metadata belongs to a ZSL buffer.
15090 CameraMetadata metadata;
15091 metadata = &resultMetadata;
15092 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15093 camera_metadata_t* updatedResultMetadata = metadata.release();
15094
15095 QCamera3PicChannel *picChannel =
15096 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15097
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015098 // Check if dumping HDR+ YUV output is enabled.
15099 char prop[PROPERTY_VALUE_MAX];
15100 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15101 bool dumpYuvOutput = atoi(prop);
15102
15103 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015104 // Dump yuv buffer to a ppm file.
15105 pbcamera::StreamConfiguration outputConfig;
15106 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15107 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15108 if (rc == OK) {
15109 char buf[FILENAME_MAX] = {};
15110 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15111 result->requestId, result->outputBuffers[0].streamId,
15112 outputConfig.image.width, outputConfig.image.height);
15113
15114 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15115 } else {
15116 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15117 __FUNCTION__, strerror(-rc), rc);
15118 }
15119 }
15120
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015121 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15122 auto halMetadata = std::make_shared<metadata_buffer_t>();
15123 clear_metadata_buffer(halMetadata.get());
15124
15125 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15126 // encoding.
15127 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15128 halStreamId, /*minFrameDuration*/0);
15129 if (res == OK) {
15130 // Return the buffer to pic channel for encoding.
15131 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15132 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15133 halMetadata);
15134 } else {
15135 // Return the buffer without encoding.
15136 // TODO: This should not happen but we may want to report an error buffer to camera
15137 // service.
15138 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15139 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15140 strerror(-res), res);
15141 }
15142
15143 // Send HDR+ metadata to framework.
15144 {
15145 pthread_mutex_lock(&mMutex);
15146
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015147 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15148 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015149 pthread_mutex_unlock(&mMutex);
15150 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015151
15152 // Remove the HDR+ pending request.
15153 {
15154 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15155 auto req = mHdrPlusPendingRequests.find(result->requestId);
15156 mHdrPlusPendingRequests.erase(req);
15157 }
15158 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015159}
15160
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015161void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15162{
15163 if (failedResult == nullptr) {
15164 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15165 return;
15166 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015167
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015168 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015169
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015170 // Remove the pending HDR+ request.
15171 {
15172 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15173 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15174
15175 // Return the buffer to pic channel.
15176 QCamera3PicChannel *picChannel =
15177 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15178 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15179
15180 mHdrPlusPendingRequests.erase(pendingRequest);
15181 }
15182
15183 pthread_mutex_lock(&mMutex);
15184
15185 // Find the pending buffers.
15186 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15187 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15188 if (pendingBuffers->frame_number == failedResult->requestId) {
15189 break;
15190 }
15191 pendingBuffers++;
15192 }
15193
15194 // Send out buffer errors for the pending buffers.
15195 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15196 std::vector<camera3_stream_buffer_t> streamBuffers;
15197 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15198 // Prepare a stream buffer.
15199 camera3_stream_buffer_t streamBuffer = {};
15200 streamBuffer.stream = buffer.stream;
15201 streamBuffer.buffer = buffer.buffer;
15202 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15203 streamBuffer.acquire_fence = -1;
15204 streamBuffer.release_fence = -1;
15205
15206 streamBuffers.push_back(streamBuffer);
15207
15208 // Send out error buffer event.
15209 camera3_notify_msg_t notify_msg = {};
15210 notify_msg.type = CAMERA3_MSG_ERROR;
15211 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15212 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15213 notify_msg.message.error.error_stream = buffer.stream;
15214
15215 orchestrateNotify(&notify_msg);
15216 }
15217
15218 camera3_capture_result_t result = {};
15219 result.frame_number = pendingBuffers->frame_number;
15220 result.num_output_buffers = streamBuffers.size();
15221 result.output_buffers = &streamBuffers[0];
15222
15223 // Send out result with buffer errors.
15224 orchestrateResult(&result);
15225
15226 // Remove pending buffers.
15227 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15228 }
15229
15230 // Remove pending request.
15231 auto halRequest = mPendingRequestsList.begin();
15232 while (halRequest != mPendingRequestsList.end()) {
15233 if (halRequest->frame_number == failedResult->requestId) {
15234 mPendingRequestsList.erase(halRequest);
15235 break;
15236 }
15237 halRequest++;
15238 }
15239
15240 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015241}
15242
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015243
15244ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15245 mParent(parent) {}
15246
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015247void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015248{
15249 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015250
15251 if (isReprocess) {
15252 mReprocessShutters.emplace(frameNumber, Shutter());
15253 } else {
15254 mShutters.emplace(frameNumber, Shutter());
15255 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015256}
15257
15258void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15259{
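    // Shutters are delivered to the framework in frame-number order: a shutter that becomes
    // ready is held back until all earlier expected shutters in the same map have been sent.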
15260 std::lock_guard<std::mutex> lock(mLock);
15261
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015262 std::map<uint32_t, Shutter> *shutters = nullptr;
15263
15264 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015265 auto shutter = mShutters.find(frameNumber);
15266 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015267 shutter = mReprocessShutters.find(frameNumber);
15268 if (shutter == mReprocessShutters.end()) {
15269 // Shutter was already sent.
15270 return;
15271 }
15272 shutters = &mReprocessShutters;
15273 } else {
15274 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015275 }
15276
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015277 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015278 shutter->second.ready = true;
15279 shutter->second.timestamp = timestamp;
15280
15281    // Iterate through the shutters and send out the ready ones until reaching one that's not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015282 shutter = shutters->begin();
15283 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015284 if (!shutter->second.ready) {
15285 // If this shutter is not ready, the following shutters can't be sent.
15286 break;
15287 }
15288
15289 camera3_notify_msg_t msg = {};
15290 msg.type = CAMERA3_MSG_SHUTTER;
15291 msg.message.shutter.frame_number = shutter->first;
15292 msg.message.shutter.timestamp = shutter->second.timestamp;
15293 mParent->orchestrateNotify(&msg);
15294
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015295 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015296 }
15297}
15298
15299void ShutterDispatcher::clear(uint32_t frameNumber)
15300{
15301 std::lock_guard<std::mutex> lock(mLock);
15302 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015303 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015304}
15305
15306void ShutterDispatcher::clear()
15307{
15308 std::lock_guard<std::mutex> lock(mLock);
15309
15310 // Log errors for stale shutters.
15311 for (auto &shutter : mShutters) {
15312 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15313 __FUNCTION__, shutter.first, shutter.second.ready,
15314 shutter.second.timestamp);
15315 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015316
15317 // Log errors for stale reprocess shutters.
15318 for (auto &shutter : mReprocessShutters) {
15319 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15320 __FUNCTION__, shutter.first, shutter.second.ready,
15321 shutter.second.timestamp);
15322 }
15323
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015324 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015325 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015326}
15327
15328OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15329 mParent(parent) {}
15330
15331status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15332{
15333 std::lock_guard<std::mutex> lock(mLock);
15334 mStreamBuffers.clear();
15335 if (!streamList) {
15336 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15337 return -EINVAL;
15338 }
15339
15340 // Create a "frame-number -> buffer" map for each stream.
15341 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15342 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15343 }
15344
15345 return OK;
15346}
15347
15348status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15349{
15350 std::lock_guard<std::mutex> lock(mLock);
15351
15352 // Find the "frame-number -> buffer" map for the stream.
15353 auto buffers = mStreamBuffers.find(stream);
15354 if (buffers == mStreamBuffers.end()) {
15355 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15356 return -EINVAL;
15357 }
15358
15359 // Create an unready buffer for this frame number.
15360 buffers->second.emplace(frameNumber, Buffer());
15361 return OK;
15362}
15363
15364void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15365 const camera3_stream_buffer_t &buffer)
15366{
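    // Buffers are returned per stream in frame-number order: a buffer that becomes ready is
    // held back until all earlier expected buffers for the same stream have been returned.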
15367 std::lock_guard<std::mutex> lock(mLock);
15368
15369 // Find the frame number -> buffer map for the stream.
15370 auto buffers = mStreamBuffers.find(buffer.stream);
15371 if (buffers == mStreamBuffers.end()) {
15372 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15373 return;
15374 }
15375
15376    // Find the unready buffer for this frame number and mark it ready.
15377 auto pendingBuffer = buffers->second.find(frameNumber);
15378 if (pendingBuffer == buffers->second.end()) {
15379 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15380 return;
15381 }
15382
15383 pendingBuffer->second.ready = true;
15384 pendingBuffer->second.buffer = buffer;
15385
15386    // Iterate through the buffers and send out the ready ones until reaching one that's not ready yet.
15387 pendingBuffer = buffers->second.begin();
15388 while (pendingBuffer != buffers->second.end()) {
15389 if (!pendingBuffer->second.ready) {
15390 // If this buffer is not ready, the following buffers can't be sent.
15391 break;
15392 }
15393
15394 camera3_capture_result_t result = {};
15395 result.frame_number = pendingBuffer->first;
15396 result.num_output_buffers = 1;
15397 result.output_buffers = &pendingBuffer->second.buffer;
15398
15399        // Send out the capture result containing this ready output buffer.
15400 mParent->orchestrateResult(&result);
15401
15402 pendingBuffer = buffers->second.erase(pendingBuffer);
15403 }
15404}
15405
15406void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15407{
15408 std::lock_guard<std::mutex> lock(mLock);
15409
15410 // Log errors for stale buffers.
15411 for (auto &buffers : mStreamBuffers) {
15412 for (auto &buffer : buffers.second) {
15413 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15414 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15415 }
15416 buffers.second.clear();
15417 }
15418
15419 if (clearConfiguredStreams) {
15420 mStreamBuffers.clear();
15421 }
15422}
15423
Thierry Strudel3d639192016-09-09 11:52:26 -070015424}; //end namespace qcamera