/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

#define MAX_VALUE_8BIT  ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
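// Illustration only (not taken from this HAL's sources): with HFR capture at
// MIN_FPS_FOR_BATCH_MODE (120 fps) and PREVIEW_FPS_FOR_HFR (30 fps), a
// plausible batching scheme groups 120 / 30 = 4 video buffers per batch, and a
// 240 fps configuration would be capped at MAX_HFR_BATCH_SIZE (8). The batch
// size actually used is computed later from the configured stream parameters;
// these numbers only illustrate how the constants relate.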
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.
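
// Illustrative sketch (no new functionality): code that touches the Easel/HDR+
// globals above is expected to hold gHdrPlusClientLock first, using the usual
// RAII pattern, e.g.
//
//     {
//         Mutex::Autolock l(gHdrPlusClientLock);
//         if (gHdrPlusClient != nullptr) {
//             // safe to use the HDR+ client here
//         }
//     }
//
// This is the same pattern openCamera() and closeCamera() use further down.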


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",   CAM_CDS_MODE_ON},
    {"Off",  CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS,      CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS,         CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all of the options, some Android enums are not listed.
 * The order in this list also matters: when mapping from HAL to Android, the lookup
 * traverses from lower to higher index, so for HAL values that map to multiple
 * Android values the first match found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
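
// Minimal sketch of how these QCameraMap tables are typically consumed
// (illustrative only -- the helper name, signature, and member names below are
// assumptions; the HAL's real lookup helpers may differ). METADATA_MAP_SIZE()
// gives the entry count, and the scan runs from index 0 upward, which is why
// table order matters when one HAL value appears in several rows:
//
//     template <typename fwkType, typename halType>
//     int lookupFwkValueSketch(const QCameraMap<fwkType, halType> *map,
//             size_t len, halType halValue, fwkType *fwkValue) {
//         for (size_t i = 0; i < len; i++) {
//             if (map[i].hal_name == halValue) {
//                 *fwkValue = map[i].fwk_name;
//                 return 0;       // first match wins
//             }
//         }
//         return -1;              // no mapping found
//     }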

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
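
// Context, for illustration only: the camera framework reaches these entry
// points through the camera3_device_t published by openCamera(), whose ops
// field is pointed at the table above (mCameraDevice.ops = &mCameraOps in the
// constructor). A typical call from the framework side looks roughly like:
//
//     camera3_device_t *dev = ...;   // obtained via the module's open() call
//     dev->ops->initialize(dev, callback_ops);
//     dev->ops->configure_streams(dev, &stream_list);
//     dev->ops->process_capture_request(dev, &request);
//
// The exact sequencing rules come from the camera3 HAL contract, not from
// this file.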

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
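
// Usage note (illustration only): logEaselEvent() is a no-op unless
// gEaselProfilingEnabled is set, so call sites are sprinkled around Easel
// state transitions to timestamp them against CLOCK_BOOTTIME, e.g.
//
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
//
// as openCamera() does below.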

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
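                // Worked example with hypothetical numbers: a 672x384 PD map
                // at 2 bytes per sample gives (672 * 384 * 2) / 16 = 32256,
                // so the matching depth BLOB stream would be 32256x1.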
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the full
                 * array size, so keep the check lenient for now.
                 */
1291 }
1292 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1293 MAX_SIZES_CNT);
1294 for (size_t i = 0; i < count; i++) {
1295 if (((int32_t)rotatedWidth ==
1296 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1297 ((int32_t)rotatedHeight ==
1298 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1299 sizeFound = true;
1300 break;
1301 }
1302 }
1303 break;
1304 } /* End of switch(newStream->format) */
1305
1306 /* We error out even if a single stream has unsupported size set */
1307 if (!sizeFound) {
1308 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1309 rotatedWidth, rotatedHeight, newStream->format,
1310 gCamCapability[mCameraId]->active_array_size.width,
1311 gCamCapability[mCameraId]->active_array_size.height);
1312 rc = -EINVAL;
1313 break;
1314 }
1315 } /* End of for each stream */
1316 return rc;
1317}
1318
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001319/*===========================================================================
1320 * FUNCTION : validateUsageFlags
1321 *
1322 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1323 *
1324 * PARAMETERS :
1325 * @stream_list : streams to be configured
1326 *
1327 * RETURN :
1328 * NO_ERROR if the usage flags are supported
1329 * error code if usage flags are not supported
1330 *
1331 *==========================================================================*/
1332int QCamera3HardwareInterface::validateUsageFlags(
1333 const camera3_stream_configuration_t* streamList)
1334{
1335 for (size_t j = 0; j < streamList->num_streams; j++) {
1336 const camera3_stream_t *newStream = streamList->streams[j];
1337
1338 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1339 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1340 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1341 continue;
1342 }
1343
1344 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1345 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1346 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1347 bool forcePreviewUBWC = true;
1348 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1349 forcePreviewUBWC = false;
1350 }
1351 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1352 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1353 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1354 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1355 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1356 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1357
1358 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1359 // So color spaces will always match.
1360
1361 // Check whether underlying formats of shared streams match.
1362 if (isVideo && isPreview && videoFormat != previewFormat) {
1363 LOGE("Combined video and preview usage flag is not supported");
1364 return -EINVAL;
1365 }
1366 if (isPreview && isZSL && previewFormat != zslFormat) {
1367 LOGE("Combined preview and zsl usage flag is not supported");
1368 return -EINVAL;
1369 }
1370 if (isVideo && isZSL && videoFormat != zslFormat) {
1371 LOGE("Combined video and zsl usage flag is not supported");
1372 return -EINVAL;
1373 }
1374 }
1375 return NO_ERROR;
1376}
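
/*
 * Illustrative sketch (not part of the original HAL code): a stream
 * configuration that validateUsageFlags() rejects is one where a single
 * IMPLEMENTATION_DEFINED output stream requests both preview and video usage
 * while the two map to different default formats. The exact gralloc bits
 * behind IS_USAGE_PREVIEW()/IS_USAGE_VIDEO() are an assumption here, and the
 * call is made from inside the HWI instance:
 *
 *   camera3_stream_t stream = {};
 *   stream.stream_type = CAMERA3_STREAM_OUTPUT;
 *   stream.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   stream.width       = 1920;
 *   stream.height      = 1080;
 *   stream.usage       = GRALLOC_USAGE_HW_TEXTURE |        // preview-style use
 *                        GRALLOC_USAGE_HW_VIDEO_ENCODER;   // video-style use
 *
 *   camera3_stream_t *streams[] = { &stream };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams = 1;
 *   config.streams     = streams;
 *
 *   // -EINVAL if the preview and video default formats differ for this
 *   // resolution, NO_ERROR otherwise.
 *   int rc = validateUsageFlags(&config);
 */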
1377
1378/*===========================================================================
1379 * FUNCTION : validateUsageFlagsForEis
1380 *
1381 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1382 *
1383 * PARAMETERS :
1384 * @stream_list : streams to be configured
1385 *
1386 * RETURN :
1387 * NO_ERROR if the usage flags are supported
1388 * error code if usage flags are not supported
1389 *
1390 *==========================================================================*/
1391int QCamera3HardwareInterface::validateUsageFlagsForEis(
1392 const camera3_stream_configuration_t* streamList)
1393{
1394 for (size_t j = 0; j < streamList->num_streams; j++) {
1395 const camera3_stream_t *newStream = streamList->streams[j];
1396
1397 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1398 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1399
1400    // Because EIS is "hard-coded" for certain use cases, and the current
1401    // implementation doesn't support shared preview and video on the same
1402    // stream, return failure if EIS is forced on.
1403 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1404 LOGE("Combined video and preview usage flag is not supported due to EIS");
1405 return -EINVAL;
1406 }
1407 }
1408 return NO_ERROR;
1409}
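
/*
 * Illustrative sketch (not part of the original HAL code): a hypothetical
 * wrapper that chains the two validators above. In the real flow,
 * validateUsageFlagsForEis() is only meaningful after m_bEisEnable and
 * m_bEisSupportedSize have been computed, so configureStreamsPerfLocked()
 * invokes the two checks at different points rather than back to back:
 *
 *   int QCamera3HardwareInterface::validateAllUsageFlags(
 *           const camera3_stream_configuration_t *streamList)
 *   {
 *       int rc = validateUsageFlags(streamList);
 *       if (rc != NO_ERROR) {
 *           return rc;  // shared usage flags map to different internal formats
 *       }
 *       return validateUsageFlagsForEis(streamList);  // preview+video vs. EIS
 *   }
 */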
1410
Thierry Strudel3d639192016-09-09 11:52:26 -07001411/*==============================================================================
1412 * FUNCTION : isSupportChannelNeeded
1413 *
1414 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1415 *
1416 * PARAMETERS :
1417 * @stream_list : streams to be configured
1418 * @stream_config_info : the config info for streams to be configured
1419 *
1420 * RETURN : Boolean true/false decision
1421 *
1422 *==========================================================================*/
1423bool QCamera3HardwareInterface::isSupportChannelNeeded(
1424 camera3_stream_configuration_t *streamList,
1425 cam_stream_size_info_t stream_config_info)
1426{
1427 uint32_t i;
1428 bool pprocRequested = false;
1429    /* Check for conditions where PProc pipeline does not have any streams */
1430 for (i = 0; i < stream_config_info.num_streams; i++) {
1431 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1432 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1433 pprocRequested = true;
1434 break;
1435 }
1436 }
1437
1438 if (pprocRequested == false )
1439 return true;
1440
1441 /* Dummy stream needed if only raw or jpeg streams present */
1442 for (i = 0; i < streamList->num_streams; i++) {
1443 switch(streamList->streams[i]->format) {
1444 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1445 case HAL_PIXEL_FORMAT_RAW10:
1446 case HAL_PIXEL_FORMAT_RAW16:
1447 case HAL_PIXEL_FORMAT_BLOB:
1448 break;
1449 default:
1450 return false;
1451 }
1452 }
1453 return true;
1454}
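
/*
 * Illustrative sketch (not part of the original HAL code): a minimal caller
 * from inside the HWI, using only the cam_stream_size_info_t fields referenced
 * above. Nothing in this configuration requests post-processing, so the first
 * loop already decides that a dummy support channel is needed; a RAW/BLOB-only
 * stream list is the second, independent reason:
 *
 *   cam_stream_size_info_t info = {};
 *   info.num_streams = 1;
 *   info.type[0] = CAM_STREAM_TYPE_RAW;
 *   info.postprocess_mask[0] = CAM_QCOM_FEATURE_NONE;   // nothing for PProc
 *
 *   camera3_stream_t raw = {};
 *   raw.format = HAL_PIXEL_FORMAT_RAW16;
 *   camera3_stream_t *streams[] = { &raw };
 *   camera3_stream_configuration_t list = {};
 *   list.num_streams = 1;
 *   list.streams = streams;
 *
 *   bool needed = isSupportChannelNeeded(&list, info);   // true here
 */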
1455
1456/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001457 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001458 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001459 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001460 *
1461 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001462 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001463 *
1464 * RETURN : int32_t type of status
1465 * NO_ERROR -- success
1466 *              non-zero failure code
1467 *
1468 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001469int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001470{
1471 int32_t rc = NO_ERROR;
1472
1473 cam_dimension_t max_dim = {0, 0};
1474 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1475 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1476 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1477 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1478 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1479 }
1480
1481 clear_metadata_buffer(mParameters);
1482
1483 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1484 max_dim);
1485 if (rc != NO_ERROR) {
1486 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1487 return rc;
1488 }
1489
1490 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1491 if (rc != NO_ERROR) {
1492 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1493 return rc;
1494 }
1495
1496 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001497 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001498
1499 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1500 mParameters);
1501 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001502 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001503 return rc;
1504 }
1505
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001506 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001507 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1508 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1509 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1510 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1511 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001512
1513 return rc;
1514}
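
/*
 * Illustrative sketch (not part of the original HAL code): a minimal caller
 * inside the HWI once mStreamConfigInfo has been populated. Field names follow
 * the LOGH above; how the values are consumed afterwards is an assumption:
 *
 *   cam_sensor_mode_info_t sensorModeInfo = {};
 *   int32_t rc = getSensorModeInfo(sensorModeInfo);
 *   if (rc == NO_ERROR) {
 *       LOGH("sensor mode: %dx%d active array, %u Hz output pixel clock, "
 *               "%d raw bits",
 *               sensorModeInfo.active_array_size.width,
 *               sensorModeInfo.active_array_size.height,
 *               sensorModeInfo.op_pixel_clk,
 *               sensorModeInfo.num_raw_bits);
 *   }
 */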
1515
1516/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 * FUNCTION : addToPPFeatureMask
1518 *
1519 * DESCRIPTION: add additional features to pp feature mask based on
1520 * stream type and usecase
1521 *
1522 * PARAMETERS :
1523 * @stream_format : stream type for feature mask
1524 * @stream_idx : stream idx within postprocess_mask list to change
1525 *
1526 * RETURN : None
1527 *
1528 *==========================================================================*/
1529void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1530 uint32_t stream_idx)
1531{
1532 char feature_mask_value[PROPERTY_VALUE_MAX];
1533 cam_feature_mask_t feature_mask;
1534 int args_converted;
1535 int property_len;
1536
1537 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001538#ifdef _LE_CAMERA_
1539 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1540 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1541 property_len = property_get("persist.camera.hal3.feature",
1542 feature_mask_value, swtnr_feature_mask_value);
1543#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001544 property_len = property_get("persist.camera.hal3.feature",
1545 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001546#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001547 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1548 (feature_mask_value[1] == 'x')) {
1549 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1550 } else {
1551 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1552 }
1553 if (1 != args_converted) {
1554 feature_mask = 0;
1555 LOGE("Wrong feature mask %s", feature_mask_value);
1556 return;
1557 }
1558
1559 switch (stream_format) {
1560 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1561 /* Add LLVD to pp feature mask only if video hint is enabled */
1562 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1563 mStreamConfigInfo.postprocess_mask[stream_idx]
1564 |= CAM_QTI_FEATURE_SW_TNR;
1565 LOGH("Added SW TNR to pp feature mask");
1566 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1567 mStreamConfigInfo.postprocess_mask[stream_idx]
1568 |= CAM_QCOM_FEATURE_LLVD;
1569 LOGH("Added LLVD SeeMore to pp feature mask");
1570 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001571 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1572 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1573 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1574 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001575 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1576 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1577 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1578 CAM_QTI_FEATURE_BINNING_CORRECTION;
1579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001580 break;
1581 }
1582 default:
1583 break;
1584 }
1585 LOGD("PP feature mask %llx",
1586 mStreamConfigInfo.postprocess_mask[stream_idx]);
1587}
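
/*
 * Illustrative sketch (not part of the original HAL code): the property parsing
 * above accepts either a "0x"-prefixed hex value or a plain decimal value for
 * persist.camera.hal3.feature. A hypothetical stand-alone version of just that
 * parsing step, mirroring the same sscanf formats:
 *
 *   static cam_feature_mask_t parseFeatureMaskProperty(const char *value)
 *   {
 *       cam_feature_mask_t mask = 0;
 *       if ((strlen(value) > 2) && (value[0] == '0') && (value[1] == 'x')) {
 *           if (sscanf(value, "0x%llx", &mask) != 1) mask = 0;
 *       } else {
 *           if (sscanf(value, "%lld", &mask) != 1) mask = 0;
 *       }
 *       return mask;
 *   }
 *
 * For experiments the mask can be set from the shell before the camera is
 * opened, e.g. "adb shell setprop persist.camera.hal3.feature 0x0".
 */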
1588
1589/*==============================================================================
1590 * FUNCTION : updateFpsInPreviewBuffer
1591 *
1592 * DESCRIPTION: update FPS information in preview buffer.
1593 *
1594 * PARAMETERS :
1595 * @metadata : pointer to metadata buffer
1596 * @frame_number: frame_number to look for in pending buffer list
1597 *
1598 * RETURN : None
1599 *
1600 *==========================================================================*/
1601void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1602 uint32_t frame_number)
1603{
1604 // Mark all pending buffers for this particular request
1605 // with corresponding framerate information
1606 for (List<PendingBuffersInRequest>::iterator req =
1607 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1608 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1609 for(List<PendingBufferInfo>::iterator j =
1610 req->mPendingBufferList.begin();
1611 j != req->mPendingBufferList.end(); j++) {
1612 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1613 if ((req->frame_number == frame_number) &&
1614 (channel->getStreamTypeMask() &
1615 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1616 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1617 CAM_INTF_PARM_FPS_RANGE, metadata) {
1618 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1619 struct private_handle_t *priv_handle =
1620 (struct private_handle_t *)(*(j->buffer));
1621 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1622 }
1623 }
1624 }
1625 }
1626}
1627
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001628/*==============================================================================
1629 * FUNCTION : updateTimeStampInPendingBuffers
1630 *
1631 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1632 * of a frame number
1633 *
1634 * PARAMETERS :
1635 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1636 * @timestamp : timestamp to be set
1637 *
1638 * RETURN : None
1639 *
1640 *==========================================================================*/
1641void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1642 uint32_t frameNumber, nsecs_t timestamp)
1643{
1644 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1645 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1646 if (req->frame_number != frameNumber)
1647 continue;
1648
1649 for (auto k = req->mPendingBufferList.begin();
1650 k != req->mPendingBufferList.end(); k++ ) {
1651 struct private_handle_t *priv_handle =
1652 (struct private_handle_t *) (*(k->buffer));
1653 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1654 }
1655 }
1656 return;
1657}
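
/*
 * Illustrative sketch (not part of the original HAL code): the two helpers
 * above share one pattern — walk mPendingBuffersMap for a frame number and tag
 * each buffer's private handle via setMetaData(). Only the display metadata key
 * differs; the per-buffer step is essentially:
 *
 *   struct private_handle_t *priv_handle =
 *           (struct private_handle_t *)(*(bufferInfo.buffer));
 *   setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);   // FPS case
 *   setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);      // VT timestamp
 *
 * where bufferInfo is a PendingBufferInfo entry, cameraFps has the
 * MetaData_t::refreshrate type and timestamp is an nsecs_t, exactly as in the
 * loops above.
 */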
1658
Thierry Strudel3d639192016-09-09 11:52:26 -07001659/*===========================================================================
1660 * FUNCTION : configureStreams
1661 *
1662 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1663 * and output streams.
1664 *
1665 * PARAMETERS :
1666 * @stream_list : streams to be configured
1667 *
1668 * RETURN : int32_t type of status
1669 *
1670 *==========================================================================*/
1671int QCamera3HardwareInterface::configureStreams(
1672 camera3_stream_configuration_t *streamList)
1673{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001674 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001675 int rc = 0;
1676
1677 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001678 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001679 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001680 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001681
1682 return rc;
1683}
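
/*
 * Illustrative sketch (not part of the original HAL code): the explicit
 * acquire/release pair above could also be expressed as a small RAII guard so
 * the perf lock is released on every exit path. The template parameters stand
 * in for the real types of mPerfLockMgr and PERF_LOCK_START_PREVIEW, which are
 * not restated here:
 *
 *   template <typename PerfLockMgrT, typename PerfLockEnumT>
 *   class ScopedPerfLock {
 *     public:
 *       ScopedPerfLock(PerfLockMgrT &mgr, PerfLockEnumT lock)
 *               : mMgr(mgr), mLock(lock) { mMgr.acquirePerfLock(mLock); }
 *       ~ScopedPerfLock() { mMgr.releasePerfLock(mLock); }
 *     private:
 *       PerfLockMgrT &mMgr;
 *       PerfLockEnumT mLock;
 *   };
 *
 *   // configureStreams() would then reduce to:
 *   //   ScopedPerfLock<decltype(mPerfLockMgr), decltype(PERF_LOCK_START_PREVIEW)>
 *   //           perfLock(mPerfLockMgr, PERF_LOCK_START_PREVIEW);
 *   //   return configureStreamsPerfLocked(streamList);
 */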
1684
1685/*===========================================================================
1686 * FUNCTION : configureStreamsPerfLocked
1687 *
1688 * DESCRIPTION: configureStreams while perfLock is held.
1689 *
1690 * PARAMETERS :
1691 * @stream_list : streams to be configured
1692 *
1693 * RETURN : int32_t type of status
1694 * NO_ERROR -- success
1695 *              non-zero failure code
1696 *==========================================================================*/
1697int QCamera3HardwareInterface::configureStreamsPerfLocked(
1698 camera3_stream_configuration_t *streamList)
1699{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001700 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001701 int rc = 0;
1702
1703 // Sanity check stream_list
1704 if (streamList == NULL) {
1705 LOGE("NULL stream configuration");
1706 return BAD_VALUE;
1707 }
1708 if (streamList->streams == NULL) {
1709 LOGE("NULL stream list");
1710 return BAD_VALUE;
1711 }
1712
1713 if (streamList->num_streams < 1) {
1714 LOGE("Bad number of streams requested: %d",
1715 streamList->num_streams);
1716 return BAD_VALUE;
1717 }
1718
1719 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1720 LOGE("Maximum number of streams %d exceeded: %d",
1721 MAX_NUM_STREAMS, streamList->num_streams);
1722 return BAD_VALUE;
1723 }
1724
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001725 rc = validateUsageFlags(streamList);
1726 if (rc != NO_ERROR) {
1727 return rc;
1728 }
1729
Thierry Strudel3d639192016-09-09 11:52:26 -07001730 mOpMode = streamList->operation_mode;
1731 LOGD("mOpMode: %d", mOpMode);
1732
1733    /* first invalidate all the streams in the mStreamList
1734 * if they appear again, they will be validated */
1735 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1736 it != mStreamInfo.end(); it++) {
1737 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1738 if (channel) {
1739 channel->stop();
1740 }
1741 (*it)->status = INVALID;
1742 }
1743
1744 if (mRawDumpChannel) {
1745 mRawDumpChannel->stop();
1746 delete mRawDumpChannel;
1747 mRawDumpChannel = NULL;
1748 }
1749
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001750 if (mHdrPlusRawSrcChannel) {
1751 mHdrPlusRawSrcChannel->stop();
1752 delete mHdrPlusRawSrcChannel;
1753 mHdrPlusRawSrcChannel = NULL;
1754 }
1755
Thierry Strudel3d639192016-09-09 11:52:26 -07001756 if (mSupportChannel)
1757 mSupportChannel->stop();
1758
1759 if (mAnalysisChannel) {
1760 mAnalysisChannel->stop();
1761 }
1762 if (mMetadataChannel) {
1763        /* If mStreamInfo is not empty, there is a metadata stream */
1764 mMetadataChannel->stop();
1765 }
1766 if (mChannelHandle) {
1767 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1768 mChannelHandle);
1769 LOGD("stopping channel %d", mChannelHandle);
1770 }
1771
1772 pthread_mutex_lock(&mMutex);
1773
1774 // Check state
1775 switch (mState) {
1776 case INITIALIZED:
1777 case CONFIGURED:
1778 case STARTED:
1779 /* valid state */
1780 break;
1781 default:
1782 LOGE("Invalid state %d", mState);
1783 pthread_mutex_unlock(&mMutex);
1784 return -ENODEV;
1785 }
1786
1787 /* Check whether we have video stream */
1788 m_bIs4KVideo = false;
1789 m_bIsVideo = false;
1790 m_bEisSupportedSize = false;
1791 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001792 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001793 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001794 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001795 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001796 uint32_t videoWidth = 0U;
1797 uint32_t videoHeight = 0U;
1798 size_t rawStreamCnt = 0;
1799 size_t stallStreamCnt = 0;
1800 size_t processedStreamCnt = 0;
1801 // Number of streams on ISP encoder path
1802 size_t numStreamsOnEncoder = 0;
1803 size_t numYuv888OnEncoder = 0;
1804 bool bYuv888OverrideJpeg = false;
1805 cam_dimension_t largeYuv888Size = {0, 0};
1806 cam_dimension_t maxViewfinderSize = {0, 0};
1807 bool bJpegExceeds4K = false;
1808 bool bJpegOnEncoder = false;
1809 bool bUseCommonFeatureMask = false;
1810 cam_feature_mask_t commonFeatureMask = 0;
1811 bool bSmallJpegSize = false;
1812 uint32_t width_ratio;
1813 uint32_t height_ratio;
1814 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1815 camera3_stream_t *inputStream = NULL;
1816 bool isJpeg = false;
1817 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001818 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001819 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001820
1821 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1822
1823 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 uint8_t eis_prop_set;
1825 uint32_t maxEisWidth = 0;
1826 uint32_t maxEisHeight = 0;
1827
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001828 // Initialize all instant AEC related variables
1829 mInstantAEC = false;
1830 mResetInstantAEC = false;
1831 mInstantAECSettledFrameNumber = 0;
1832 mAecSkipDisplayFrameBound = 0;
1833 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001834 mCurrFeatureState = 0;
1835 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001836
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1838
1839 size_t count = IS_TYPE_MAX;
1840 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1841 for (size_t i = 0; i < count; i++) {
1842 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001843 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1844 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 break;
1846 }
1847 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001848
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001849 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001850 maxEisWidth = MAX_EIS_WIDTH;
1851 maxEisHeight = MAX_EIS_HEIGHT;
1852 }
1853
1854 /* EIS setprop control */
1855 char eis_prop[PROPERTY_VALUE_MAX];
1856 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001857 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001858 eis_prop_set = (uint8_t)atoi(eis_prop);
1859
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001860 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1862
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001863 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1864 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001865
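    /*
     * Illustrative note (not part of the original HAL code): with the default
     * "persist.camera.eis.enable" value of 1, m_bEisEnable ends up true only
     * when the sensor advertises IS_TYPE_EIS_2_0/3_0 and the session is not
     * constrained high-speed; it is further cleared below for front cameras or
     * when no video stream is present. For debugging, EIS can be forced off
     * from the shell (property name taken from the property_get() call above):
     *
     *   adb shell setprop persist.camera.eis.enable 0
     */
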
Thierry Strudel3d639192016-09-09 11:52:26 -07001866 /* stream configurations */
1867 for (size_t i = 0; i < streamList->num_streams; i++) {
1868 camera3_stream_t *newStream = streamList->streams[i];
1869 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1870 "height = %d, rotation = %d, usage = 0x%x",
1871 i, newStream->stream_type, newStream->format,
1872 newStream->width, newStream->height, newStream->rotation,
1873 newStream->usage);
1874 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1875 newStream->stream_type == CAMERA3_STREAM_INPUT){
1876 isZsl = true;
1877 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001878 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1879 IS_USAGE_PREVIEW(newStream->usage)) {
1880 isPreview = true;
1881 }
1882
Thierry Strudel3d639192016-09-09 11:52:26 -07001883 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1884 inputStream = newStream;
1885 }
1886
Emilian Peev7650c122017-01-19 08:24:33 -08001887 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1888 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 isJpeg = true;
1890 jpegSize.width = newStream->width;
1891 jpegSize.height = newStream->height;
1892 if (newStream->width > VIDEO_4K_WIDTH ||
1893 newStream->height > VIDEO_4K_HEIGHT)
1894 bJpegExceeds4K = true;
1895 }
1896
1897 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1898 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1899 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001900 // In HAL3 we can have multiple different video streams.
1901 // The variables video width and height are used below as
1902 // dimensions of the biggest of them
1903 if (videoWidth < newStream->width ||
1904 videoHeight < newStream->height) {
1905 videoWidth = newStream->width;
1906 videoHeight = newStream->height;
1907 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1909 (VIDEO_4K_HEIGHT <= newStream->height)) {
1910 m_bIs4KVideo = true;
1911 }
1912 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1913 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001914
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 }
1916 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1917 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1918 switch (newStream->format) {
1919 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001920 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1921 depthPresent = true;
1922 break;
1923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001924 stallStreamCnt++;
1925 if (isOnEncoder(maxViewfinderSize, newStream->width,
1926 newStream->height)) {
1927 numStreamsOnEncoder++;
1928 bJpegOnEncoder = true;
1929 }
1930 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1931 newStream->width);
1932 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1933                    newStream->height);
1934 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1935 "FATAL: max_downscale_factor cannot be zero and so assert");
1936 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1937 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1938 LOGH("Setting small jpeg size flag to true");
1939 bSmallJpegSize = true;
1940 }
1941 break;
1942 case HAL_PIXEL_FORMAT_RAW10:
1943 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1944 case HAL_PIXEL_FORMAT_RAW16:
1945 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001946 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1947 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1948 pdStatCount++;
1949 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 break;
1951 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1952 processedStreamCnt++;
1953 if (isOnEncoder(maxViewfinderSize, newStream->width,
1954 newStream->height)) {
1955 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1956 !IS_USAGE_ZSL(newStream->usage)) {
1957 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1958 }
1959 numStreamsOnEncoder++;
1960 }
1961 break;
1962 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1963 processedStreamCnt++;
1964 if (isOnEncoder(maxViewfinderSize, newStream->width,
1965 newStream->height)) {
1966 // If Yuv888 size is not greater than 4K, set feature mask
1967 // to SUPERSET so that it support concurrent request on
1968 // YUV and JPEG.
1969 if (newStream->width <= VIDEO_4K_WIDTH &&
1970 newStream->height <= VIDEO_4K_HEIGHT) {
1971 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1972 }
1973 numStreamsOnEncoder++;
1974 numYuv888OnEncoder++;
1975 largeYuv888Size.width = newStream->width;
1976 largeYuv888Size.height = newStream->height;
1977 }
1978 break;
1979 default:
1980 processedStreamCnt++;
1981 if (isOnEncoder(maxViewfinderSize, newStream->width,
1982 newStream->height)) {
1983 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1984 numStreamsOnEncoder++;
1985 }
1986 break;
1987 }
1988
1989 }
1990 }
1991
1992 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1993 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1994 !m_bIsVideo) {
1995 m_bEisEnable = false;
1996 }
1997
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001998 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1999 pthread_mutex_unlock(&mMutex);
2000 return -EINVAL;
2001 }
2002
Thierry Strudel54dc9782017-02-15 12:12:10 -08002003 uint8_t forceEnableTnr = 0;
2004 char tnr_prop[PROPERTY_VALUE_MAX];
2005 memset(tnr_prop, 0, sizeof(tnr_prop));
2006 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2007 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Logic to enable/disable TNR based on specific config size/etc.*/
2010 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002011 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2012 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002013 else if (forceEnableTnr)
2014 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002015
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002016 char videoHdrProp[PROPERTY_VALUE_MAX];
2017 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2018 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2019 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2020
2021 if (hdr_mode_prop == 1 && m_bIsVideo &&
2022 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2023 m_bVideoHdrEnabled = true;
2024 else
2025 m_bVideoHdrEnabled = false;
2026
2027
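    /*
     * Illustrative note (not part of the original HAL code): the two debug
     * properties read above map directly onto the flags they control, so for
     * quick experiments from the shell (property names taken from the
     * property_get() calls above):
     *
     *   adb shell setprop debug.camera.tnr.forceenable 1   // force m_bTnrEnabled
     *   adb shell setprop persist.camera.hdr.video 1       // request video HDR
     *
     * Video HDR is still gated on an active video stream and on not being in
     * constrained high-speed mode, as the condition above shows.
     */
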
Thierry Strudel3d639192016-09-09 11:52:26 -07002028 /* Check if num_streams is sane */
2029 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2030 rawStreamCnt > MAX_RAW_STREAMS ||
2031 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2032        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2033 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2034 pthread_mutex_unlock(&mMutex);
2035 return -EINVAL;
2036 }
2037 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002038 if (isZsl && m_bIs4KVideo) {
2039 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002040 pthread_mutex_unlock(&mMutex);
2041 return -EINVAL;
2042 }
2043 /* Check if stream sizes are sane */
2044 if (numStreamsOnEncoder > 2) {
2045 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2046 pthread_mutex_unlock(&mMutex);
2047 return -EINVAL;
2048 } else if (1 < numStreamsOnEncoder){
2049 bUseCommonFeatureMask = true;
2050 LOGH("Multiple streams above max viewfinder size, common mask needed");
2051 }
2052
2053 /* Check if BLOB size is greater than 4k in 4k recording case */
2054 if (m_bIs4KVideo && bJpegExceeds4K) {
2055 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2056 pthread_mutex_unlock(&mMutex);
2057 return -EINVAL;
2058 }
2059
Emilian Peev7650c122017-01-19 08:24:33 -08002060 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2061 depthPresent) {
2062 LOGE("HAL doesn't support depth streams in HFR mode!");
2063 pthread_mutex_unlock(&mMutex);
2064 return -EINVAL;
2065 }
2066
Thierry Strudel3d639192016-09-09 11:52:26 -07002067 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2068 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2069 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2070 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2071 // configurations:
2072 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2073 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2074 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2075 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2076 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2077 __func__);
2078 pthread_mutex_unlock(&mMutex);
2079 return -EINVAL;
2080 }
2081
2082 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2083    // the YUV stream's size is strictly greater than the JPEG size, set common
2084 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2085 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2086 jpegSize.width, jpegSize.height) &&
2087 largeYuv888Size.width > jpegSize.width &&
2088 largeYuv888Size.height > jpegSize.height) {
2089 bYuv888OverrideJpeg = true;
2090 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2091 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2092 }
2093
2094 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2095 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2096 commonFeatureMask);
2097 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2098 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2099
2100 rc = validateStreamDimensions(streamList);
2101 if (rc == NO_ERROR) {
2102 rc = validateStreamRotations(streamList);
2103 }
2104 if (rc != NO_ERROR) {
2105 LOGE("Invalid stream configuration requested!");
2106 pthread_mutex_unlock(&mMutex);
2107 return rc;
2108 }
2109
Emilian Peev0f3c3162017-03-15 12:57:46 +00002110 if (1 < pdStatCount) {
2111 LOGE("HAL doesn't support multiple PD streams");
2112 pthread_mutex_unlock(&mMutex);
2113 return -EINVAL;
2114 }
2115
2116 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2117 (1 == pdStatCount)) {
2118 LOGE("HAL doesn't support PD streams in HFR mode!");
2119 pthread_mutex_unlock(&mMutex);
2120 return -EINVAL;
2121 }
2122
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2124 for (size_t i = 0; i < streamList->num_streams; i++) {
2125 camera3_stream_t *newStream = streamList->streams[i];
2126 LOGH("newStream type = %d, stream format = %d "
2127 "stream size : %d x %d, stream rotation = %d",
2128 newStream->stream_type, newStream->format,
2129 newStream->width, newStream->height, newStream->rotation);
2130 //if the stream is in the mStreamList validate it
2131 bool stream_exists = false;
2132 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2133 it != mStreamInfo.end(); it++) {
2134 if ((*it)->stream == newStream) {
2135 QCamera3ProcessingChannel *channel =
2136 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2137 stream_exists = true;
2138 if (channel)
2139 delete channel;
2140 (*it)->status = VALID;
2141 (*it)->stream->priv = NULL;
2142 (*it)->channel = NULL;
2143 }
2144 }
2145 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2146 //new stream
2147 stream_info_t* stream_info;
2148 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2149 if (!stream_info) {
2150 LOGE("Could not allocate stream info");
2151 rc = -ENOMEM;
2152 pthread_mutex_unlock(&mMutex);
2153 return rc;
2154 }
2155 stream_info->stream = newStream;
2156 stream_info->status = VALID;
2157 stream_info->channel = NULL;
2158 mStreamInfo.push_back(stream_info);
2159 }
2160 /* Covers Opaque ZSL and API1 F/W ZSL */
2161 if (IS_USAGE_ZSL(newStream->usage)
2162 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2163 if (zslStream != NULL) {
2164 LOGE("Multiple input/reprocess streams requested!");
2165 pthread_mutex_unlock(&mMutex);
2166 return BAD_VALUE;
2167 }
2168 zslStream = newStream;
2169 }
2170 /* Covers YUV reprocess */
2171 if (inputStream != NULL) {
2172 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2173 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2174 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2175 && inputStream->width == newStream->width
2176 && inputStream->height == newStream->height) {
2177 if (zslStream != NULL) {
2178                /* This scenario indicates that multiple YUV streams with the
2179                 * same size as the input stream have been requested. Since the
2180                 * zsl stream handle is used solely to override the size of
2181                 * streams that share h/w streams, we just make a guess here as
2182                 * to which stream is the ZSL stream. This will be refactored
2183                 * once we have generic logic for streams sharing encoder output.
2184                 */
2185 LOGH("Warning, Multiple ip/reprocess streams requested!");
2186 }
2187 zslStream = newStream;
2188 }
2189 }
2190 }
2191
2192 /* If a zsl stream is set, we know that we have configured at least one input or
2193 bidirectional stream */
2194 if (NULL != zslStream) {
2195 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2196 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2197 mInputStreamInfo.format = zslStream->format;
2198 mInputStreamInfo.usage = zslStream->usage;
2199 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2200 mInputStreamInfo.dim.width,
2201 mInputStreamInfo.dim.height,
2202 mInputStreamInfo.format, mInputStreamInfo.usage);
2203 }
2204
2205 cleanAndSortStreamInfo();
2206 if (mMetadataChannel) {
2207 delete mMetadataChannel;
2208 mMetadataChannel = NULL;
2209 }
2210 if (mSupportChannel) {
2211 delete mSupportChannel;
2212 mSupportChannel = NULL;
2213 }
2214
2215 if (mAnalysisChannel) {
2216 delete mAnalysisChannel;
2217 mAnalysisChannel = NULL;
2218 }
2219
2220 if (mDummyBatchChannel) {
2221 delete mDummyBatchChannel;
2222 mDummyBatchChannel = NULL;
2223 }
2224
Emilian Peev7650c122017-01-19 08:24:33 -08002225 if (mDepthChannel) {
2226 mDepthChannel = NULL;
2227 }
2228
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002229 mShutterDispatcher.clear();
2230 mOutputBufferDispatcher.clear();
2231
Thierry Strudel2896d122017-02-23 19:18:03 -08002232 char is_type_value[PROPERTY_VALUE_MAX];
2233 property_get("persist.camera.is_type", is_type_value, "4");
2234 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2235
Binhao Line406f062017-05-03 14:39:44 -07002236 char property_value[PROPERTY_VALUE_MAX];
2237 property_get("persist.camera.gzoom.at", property_value, "0");
2238 int goog_zoom_at = atoi(property_value);
2239 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2240 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2241
2242 property_get("persist.camera.gzoom.4k", property_value, "0");
2243 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2244
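    /*
     * Illustrative note (not part of the original HAL code): persist.camera.gzoom.at
     * is decoded above as a bit mask — bit 0 enables Google zoom on the video
     * stream, bit 1 on the preview stream — and persist.camera.gzoom.4k gates it
     * for 4K video. For example:
     *
     *   adb shell setprop persist.camera.gzoom.at 3   // video + preview
     *   adb shell setprop persist.camera.gzoom.4k 1   // allow it for 4K video
     *
     * (Property names and bit meanings are taken from the parsing code above.)
     */
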
Thierry Strudel3d639192016-09-09 11:52:26 -07002245 //Create metadata channel and initialize it
2246 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2247 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2248 gCamCapability[mCameraId]->color_arrangement);
2249 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2250 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002251 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002252 if (mMetadataChannel == NULL) {
2253 LOGE("failed to allocate metadata channel");
2254 rc = -ENOMEM;
2255 pthread_mutex_unlock(&mMutex);
2256 return rc;
2257 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002258 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002259 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2260 if (rc < 0) {
2261 LOGE("metadata channel initialization failed");
2262 delete mMetadataChannel;
2263 mMetadataChannel = NULL;
2264 pthread_mutex_unlock(&mMutex);
2265 return rc;
2266 }
2267
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002270 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002271 // Keep track of preview/video streams indices.
2272 // There could be more than one preview streams, but only one video stream.
2273 int32_t video_stream_idx = -1;
2274 int32_t preview_stream_idx[streamList->num_streams];
2275 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2277 /* Allocate channel objects for the requested streams */
2278 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002279
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 camera3_stream_t *newStream = streamList->streams[i];
2281 uint32_t stream_usage = newStream->usage;
2282 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2283 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2284 struct camera_info *p_info = NULL;
2285 pthread_mutex_lock(&gCamLock);
2286 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2287 pthread_mutex_unlock(&gCamLock);
2288 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2289 || IS_USAGE_ZSL(newStream->usage)) &&
2290 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002291 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002292 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002293 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2294 if (bUseCommonFeatureMask)
2295 zsl_ppmask = commonFeatureMask;
2296 else
2297 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002299 if (numStreamsOnEncoder > 0)
2300 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2301 else
2302 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002303 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002304 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002306 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 LOGH("Input stream configured, reprocess config");
2308 } else {
2309 //for non zsl streams find out the format
2310 switch (newStream->format) {
2311 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2312 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002313 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2315 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2316 /* add additional features to pp feature mask */
2317 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2318 mStreamConfigInfo.num_streams);
2319
2320 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2321 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2322 CAM_STREAM_TYPE_VIDEO;
2323 if (m_bTnrEnabled && m_bTnrVideo) {
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2325 CAM_QCOM_FEATURE_CPP_TNR;
2326 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2327 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2328 ~CAM_QCOM_FEATURE_CDS;
2329 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2331 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2332 CAM_QTI_FEATURE_PPEISCORE;
2333 }
Binhao Line406f062017-05-03 14:39:44 -07002334 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2336 CAM_QCOM_FEATURE_GOOG_ZOOM;
2337 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002338 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002339 } else {
2340 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2341 CAM_STREAM_TYPE_PREVIEW;
2342 if (m_bTnrEnabled && m_bTnrPreview) {
2343 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2344 CAM_QCOM_FEATURE_CPP_TNR;
2345 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2346 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2347 ~CAM_QCOM_FEATURE_CDS;
2348 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002349 if(!m_bSwTnrPreview) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2351 ~CAM_QTI_FEATURE_SW_TNR;
2352 }
Binhao Line406f062017-05-03 14:39:44 -07002353 if (is_goog_zoom_preview_enabled) {
2354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2355 CAM_QCOM_FEATURE_GOOG_ZOOM;
2356 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002357 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 padding_info.width_padding = mSurfaceStridePadding;
2359 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002360 previewSize.width = (int32_t)newStream->width;
2361 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002362 }
2363 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2364 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2365 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2366 newStream->height;
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2368 newStream->width;
2369 }
2370 }
2371 break;
2372 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002373 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002374 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2375 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2376 if (bUseCommonFeatureMask)
2377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2378 commonFeatureMask;
2379 else
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2381 CAM_QCOM_FEATURE_NONE;
2382 } else {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2384 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2385 }
2386 break;
2387 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2390 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2391 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2392 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2393 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 /* Remove rotation if it is not supported
2395 for 4K LiveVideo snapshot case (online processing) */
2396 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2397 CAM_QCOM_FEATURE_ROTATION)) {
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2399 &= ~CAM_QCOM_FEATURE_ROTATION;
2400 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002401 } else {
2402 if (bUseCommonFeatureMask &&
2403 isOnEncoder(maxViewfinderSize, newStream->width,
2404 newStream->height)) {
2405 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2406 } else {
2407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2408 }
2409 }
2410 if (isZsl) {
2411 if (zslStream) {
2412 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2413 (int32_t)zslStream->width;
2414 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2415 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2417 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002418 } else {
2419 LOGE("Error, No ZSL stream identified");
2420 pthread_mutex_unlock(&mMutex);
2421 return -EINVAL;
2422 }
2423 } else if (m_bIs4KVideo) {
2424 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2425 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2426 } else if (bYuv888OverrideJpeg) {
2427 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2428 (int32_t)largeYuv888Size.width;
2429 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2430 (int32_t)largeYuv888Size.height;
2431 }
2432 break;
2433 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2434 case HAL_PIXEL_FORMAT_RAW16:
2435 case HAL_PIXEL_FORMAT_RAW10:
2436 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2438 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002439 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2440 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2441 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2442 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2443 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2444 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2445 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2446 gCamCapability[mCameraId]->dt[mPDIndex];
2447 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2448 gCamCapability[mCameraId]->vc[mPDIndex];
2449 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002450 break;
2451 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002452 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2455 break;
2456 }
2457 }
2458
2459 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2460 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2461 gCamCapability[mCameraId]->color_arrangement);
2462
2463 if (newStream->priv == NULL) {
2464 //New stream, construct channel
2465 switch (newStream->stream_type) {
2466 case CAMERA3_STREAM_INPUT:
2467 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2468 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2469 break;
2470 case CAMERA3_STREAM_BIDIRECTIONAL:
2471 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2472 GRALLOC_USAGE_HW_CAMERA_WRITE;
2473 break;
2474 case CAMERA3_STREAM_OUTPUT:
2475                /* For video encoding streams, set the read/write rarely
2476                 * flags so that the buffers may be allocated un-cached */
2477 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2478 newStream->usage |=
2479 (GRALLOC_USAGE_SW_READ_RARELY |
2480 GRALLOC_USAGE_SW_WRITE_RARELY |
2481 GRALLOC_USAGE_HW_CAMERA_WRITE);
2482 else if (IS_USAGE_ZSL(newStream->usage))
2483 {
2484 LOGD("ZSL usage flag skipping");
2485 }
2486 else if (newStream == zslStream
2487 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2488 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2489 } else
2490 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2491 break;
2492 default:
2493 LOGE("Invalid stream_type %d", newStream->stream_type);
2494 break;
2495 }
2496
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002497 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2499 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2500 QCamera3ProcessingChannel *channel = NULL;
2501 switch (newStream->format) {
2502 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2503 if ((newStream->usage &
2504 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2505 (streamList->operation_mode ==
2506 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2507 ) {
2508 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2509 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002510 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 this,
2512 newStream,
2513 (cam_stream_type_t)
2514 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2516 mMetadataChannel,
2517 0); //heap buffers are not required for HFR video channel
2518 if (channel == NULL) {
2519 LOGE("allocation of channel failed");
2520 pthread_mutex_unlock(&mMutex);
2521 return -ENOMEM;
2522 }
2523 //channel->getNumBuffers() will return 0 here so use
2524                        //MAX_INFLIGHT_HFR_REQUESTS
2525 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2526 newStream->priv = channel;
2527 LOGI("num video buffers in HFR mode: %d",
2528 MAX_INFLIGHT_HFR_REQUESTS);
2529 } else {
2530                        /* Copy stream contents in the HFR preview-only case to
2531                         * create a dummy batch channel so that sensor streaming
2532                         * is in HFR mode */
2533 if (!m_bIsVideo && (streamList->operation_mode ==
2534 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2535 mDummyBatchStream = *newStream;
2536 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 int bufferCount = MAX_INFLIGHT_REQUESTS;
2538 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2539 CAM_STREAM_TYPE_VIDEO) {
2540 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2541 bufferCount = MAX_VIDEO_BUFFERS;
2542 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002543 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2544 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002545 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 this,
2547 newStream,
2548 (cam_stream_type_t)
2549 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2550 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2551 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002552 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 if (channel == NULL) {
2554 LOGE("allocation of channel failed");
2555 pthread_mutex_unlock(&mMutex);
2556 return -ENOMEM;
2557 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002558 /* disable UBWC for preview, though supported,
2559 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002560 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002561 (previewSize.width == (int32_t)videoWidth)&&
2562 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002563 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002564 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002565 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002566 /* When goog_zoom is linked to the preview or video stream,
2567 * disable ubwc to the linked stream */
2568 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2569 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2570 channel->setUBWCEnabled(false);
2571 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002572 newStream->max_buffers = channel->getNumBuffers();
2573 newStream->priv = channel;
2574 }
2575 break;
2576 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2577 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2578 mChannelHandle,
2579 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002580 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 this,
2582 newStream,
2583 (cam_stream_type_t)
2584 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2585 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2586 mMetadataChannel);
2587 if (channel == NULL) {
2588 LOGE("allocation of YUV channel failed");
2589 pthread_mutex_unlock(&mMutex);
2590 return -ENOMEM;
2591 }
2592 newStream->max_buffers = channel->getNumBuffers();
2593 newStream->priv = channel;
2594 break;
2595 }
2596 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2597 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002598 case HAL_PIXEL_FORMAT_RAW10: {
2599 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2600 (HAL_DATASPACE_DEPTH != newStream->data_space))
2601 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002602 mRawChannel = new QCamera3RawChannel(
2603 mCameraHandle->camera_handle, mChannelHandle,
2604 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002605 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 this, newStream,
2607 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002608 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002609 if (mRawChannel == NULL) {
2610 LOGE("allocation of raw channel failed");
2611 pthread_mutex_unlock(&mMutex);
2612 return -ENOMEM;
2613 }
2614 newStream->max_buffers = mRawChannel->getNumBuffers();
2615 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2616 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002617 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002618 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002619 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2620 mDepthChannel = new QCamera3DepthChannel(
2621 mCameraHandle->camera_handle, mChannelHandle,
2622 mCameraHandle->ops, NULL, NULL, &padding_info,
2623 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2624 mMetadataChannel);
2625 if (NULL == mDepthChannel) {
2626 LOGE("Allocation of depth channel failed");
2627 pthread_mutex_unlock(&mMutex);
2628 return NO_MEMORY;
2629 }
2630 newStream->priv = mDepthChannel;
2631 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2632 } else {
2633 // Max live snapshot inflight buffer is 1. This is to mitigate
2634 // frame drop issues for video snapshot. The more buffers being
2635 // allocated, the more frame drops there are.
2636 mPictureChannel = new QCamera3PicChannel(
2637 mCameraHandle->camera_handle, mChannelHandle,
2638 mCameraHandle->ops, captureResultCb,
2639 setBufferErrorStatus, &padding_info, this, newStream,
2640 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2641 m_bIs4KVideo, isZsl, mMetadataChannel,
2642 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2643 if (mPictureChannel == NULL) {
2644 LOGE("allocation of channel failed");
2645 pthread_mutex_unlock(&mMutex);
2646 return -ENOMEM;
2647 }
2648 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2649 newStream->max_buffers = mPictureChannel->getNumBuffers();
2650 mPictureChannel->overrideYuvSize(
2651 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2652 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002653 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 break;
2655
2656 default:
2657 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002658 pthread_mutex_unlock(&mMutex);
2659 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 }
2661 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2662 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2663 } else {
2664 LOGE("Error, Unknown stream type");
2665 pthread_mutex_unlock(&mMutex);
2666 return -EINVAL;
2667 }
2668
2669 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
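            // If the backend selects a UBWC format for this stream, advertise the UBWC
            // usage flag so gralloc allocates compressed buffers for it.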
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002670 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2671 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002672 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002673 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2675 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2676 }
2677 }
2678
2679 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2680 it != mStreamInfo.end(); it++) {
2681 if ((*it)->stream == newStream) {
2682 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2683 break;
2684 }
2685 }
2686 } else {
2687 // Channel already exists for this stream
2688 // Do nothing for now
2689 }
2690 padding_info = gCamCapability[mCameraId]->padding_info;
2691
Emilian Peev7650c122017-01-19 08:24:33 -08002692 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002693 * since there is no real stream associated with it
2694 */
Emilian Peev7650c122017-01-19 08:24:33 -08002695 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002696 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2697 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002698 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002700 }
2701
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002702 // Let buffer dispatcher know the configured streams.
2703 mOutputBufferDispatcher.configureStreams(streamList);
2704
Binhao Lincdb362a2017-04-20 13:31:54 -07002705 // By default, preview stream TNR is disabled.
2706 // Enable TNR to the preview stream if all conditions below are satisfied:
2707 // 1. resolution <= 1080p.
2708 // 2. preview resolution == video resolution.
2709 // 3. video stream TNR is enabled.
2710 // 4. EIS2.0
2711 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2712 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2713 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2714 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2715 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2716 video_stream->width == preview_stream->width &&
2717 video_stream->height == preview_stream->height) {
2718 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2719 CAM_QCOM_FEATURE_CPP_TNR;
2720 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2721 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2722 ~CAM_QCOM_FEATURE_CDS;
2723 }
2724 }
2725
Thierry Strudel2896d122017-02-23 19:18:03 -08002726 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2727 onlyRaw = false;
2728 }
2729
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002730 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002731 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002732 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002733 cam_analysis_info_t analysisInfo;
2734 int32_t ret = NO_ERROR;
2735 ret = mCommon.getAnalysisInfo(
2736 FALSE,
2737 analysisFeatureMask,
2738 &analysisInfo);
2739 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002740 cam_color_filter_arrangement_t analysis_color_arrangement =
2741 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2742 CAM_FILTER_ARRANGEMENT_Y :
2743 gCamCapability[mCameraId]->color_arrangement);
2744 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2745 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002746 cam_dimension_t analysisDim;
2747 analysisDim = mCommon.getMatchingDimension(previewSize,
2748 analysisInfo.analysis_recommended_res);
2749
2750 mAnalysisChannel = new QCamera3SupportChannel(
2751 mCameraHandle->camera_handle,
2752 mChannelHandle,
2753 mCameraHandle->ops,
2754 &analysisInfo.analysis_padding_info,
2755 analysisFeatureMask,
2756 CAM_STREAM_TYPE_ANALYSIS,
2757 &analysisDim,
2758 (analysisInfo.analysis_format
2759 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2760 : CAM_FORMAT_YUV_420_NV21),
2761 analysisInfo.hw_analysis_supported,
2762 gCamCapability[mCameraId]->color_arrangement,
2763 this,
2764 0); // force buffer count to 0
2765 } else {
2766 LOGW("getAnalysisInfo failed, ret = %d", ret);
2767 }
2768 if (!mAnalysisChannel) {
2769 LOGW("Analysis channel cannot be created");
2770 }
2771 }
2772
Thierry Strudel3d639192016-09-09 11:52:26 -07002773 //RAW DUMP channel
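    // Internal-only RAW stream used for debug dumps; created only when raw dump is
    // enabled (mEnableRawDump) and the framework did not request a RAW stream itself.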
2774 if (mEnableRawDump && isRawStreamRequested == false){
2775 cam_dimension_t rawDumpSize;
2776 rawDumpSize = getMaxRawSize(mCameraId);
2777 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2778 setPAAFSupport(rawDumpFeatureMask,
2779 CAM_STREAM_TYPE_RAW,
2780 gCamCapability[mCameraId]->color_arrangement);
2781 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2782 mChannelHandle,
2783 mCameraHandle->ops,
2784 rawDumpSize,
2785 &padding_info,
2786 this, rawDumpFeatureMask);
2787 if (!mRawDumpChannel) {
2788 LOGE("Raw Dump channel cannot be created");
2789 pthread_mutex_unlock(&mMutex);
2790 return -ENOMEM;
2791 }
2792 }
2793
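    // Append a stream config entry for the analysis stream so the backend sizes and
    // configures it along with the framework streams.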
Thierry Strudel3d639192016-09-09 11:52:26 -07002794 if (mAnalysisChannel) {
2795 cam_analysis_info_t analysisInfo;
2796 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2797 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2798 CAM_STREAM_TYPE_ANALYSIS;
2799 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2800 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002801 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2803 &analysisInfo);
2804 if (rc != NO_ERROR) {
2805 LOGE("getAnalysisInfo failed, ret = %d", rc);
2806 pthread_mutex_unlock(&mMutex);
2807 return rc;
2808 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002809 cam_color_filter_arrangement_t analysis_color_arrangement =
2810 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2811 CAM_FILTER_ARRANGEMENT_Y :
2812 gCamCapability[mCameraId]->color_arrangement);
2813 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2814 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2815 analysis_color_arrangement);
2816
Thierry Strudel3d639192016-09-09 11:52:26 -07002817 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 mCommon.getMatchingDimension(previewSize,
2819 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002820 mStreamConfigInfo.num_streams++;
2821 }
2822
Thierry Strudel2896d122017-02-23 19:18:03 -08002823 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 cam_analysis_info_t supportInfo;
2825 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2826 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2827 setPAAFSupport(callbackFeatureMask,
2828 CAM_STREAM_TYPE_CALLBACK,
2829 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002830 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002831 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002832 if (ret != NO_ERROR) {
2833 /* Ignore the error for Mono camera
2834 * because the PAAF bit mask is only set
2835 * for CAM_STREAM_TYPE_ANALYSIS stream type
2836 */
2837 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2838 LOGW("getAnalysisInfo failed, ret = %d", ret);
2839 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002840 }
2841 mSupportChannel = new QCamera3SupportChannel(
2842 mCameraHandle->camera_handle,
2843 mChannelHandle,
2844 mCameraHandle->ops,
2845 &gCamCapability[mCameraId]->padding_info,
2846 callbackFeatureMask,
2847 CAM_STREAM_TYPE_CALLBACK,
2848 &QCamera3SupportChannel::kDim,
2849 CAM_FORMAT_YUV_420_NV21,
2850 supportInfo.hw_analysis_supported,
2851 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002852 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002853 if (!mSupportChannel) {
2854 LOGE("dummy channel cannot be created");
2855 pthread_mutex_unlock(&mMutex);
2856 return -ENOMEM;
2857 }
2858 }
2859
2860 if (mSupportChannel) {
2861 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2862 QCamera3SupportChannel::kDim;
2863 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2864 CAM_STREAM_TYPE_CALLBACK;
2865 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2866 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2867 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2868 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2869 gCamCapability[mCameraId]->color_arrangement);
2870 mStreamConfigInfo.num_streams++;
2871 }
2872
2873 if (mRawDumpChannel) {
2874 cam_dimension_t rawSize;
2875 rawSize = getMaxRawSize(mCameraId);
2876 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2877 rawSize;
2878 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2879 CAM_STREAM_TYPE_RAW;
2880 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2881 CAM_QCOM_FEATURE_NONE;
2882 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2883 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2884 gCamCapability[mCameraId]->color_arrangement);
2885 mStreamConfigInfo.num_streams++;
2886 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002887
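    // Likewise, reserve a RAW stream entry for the internal channel that sources RAW
    // frames for HDR+ processing, if one exists.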
2888 if (mHdrPlusRawSrcChannel) {
2889 cam_dimension_t rawSize;
2890 rawSize = getMaxRawSize(mCameraId);
2891 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2892 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2893 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2894 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2895 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2896 gCamCapability[mCameraId]->color_arrangement);
2897 mStreamConfigInfo.num_streams++;
2898 }
2899
Thierry Strudel3d639192016-09-09 11:52:26 -07002900 /* In HFR mode, if video stream is not added, create a dummy channel so that
2901 * ISP can create a batch mode even for preview only case. This channel is
2902 * never 'start'ed (no stream-on), it is only 'initialized' */
2903 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2904 !m_bIsVideo) {
2905 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2906 setPAAFSupport(dummyFeatureMask,
2907 CAM_STREAM_TYPE_VIDEO,
2908 gCamCapability[mCameraId]->color_arrangement);
2909 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2910 mChannelHandle,
2911 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002912 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002913 this,
2914 &mDummyBatchStream,
2915 CAM_STREAM_TYPE_VIDEO,
2916 dummyFeatureMask,
2917 mMetadataChannel);
2918 if (NULL == mDummyBatchChannel) {
2919                 LOGE("creation of mDummyBatchChannel failed. "
2920 "Preview will use non-hfr sensor mode ");
2921 }
2922 }
2923 if (mDummyBatchChannel) {
2924 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2925 mDummyBatchStream.width;
2926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2927 mDummyBatchStream.height;
2928 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2929 CAM_STREAM_TYPE_VIDEO;
2930 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2931 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2932 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2933 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2934 gCamCapability[mCameraId]->color_arrangement);
2935 mStreamConfigInfo.num_streams++;
2936 }
2937
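    // Pick max_buffers for this configuration: 0 for 4K video, MAX_VIDEO_BUFFERS when
    // EIS 3.0 is enabled, and MAX_INFLIGHT_REQUESTS otherwise.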
2938 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2939 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002940 m_bIs4KVideo ? 0 :
2941 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002942
2943 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2944 for (pendingRequestIterator i = mPendingRequestsList.begin();
2945 i != mPendingRequestsList.end();) {
2946 i = erasePendingRequest(i);
2947 }
2948 mPendingFrameDropList.clear();
2949 // Initialize/Reset the pending buffers list
2950 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2951 req.mPendingBufferList.clear();
2952 }
2953 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2954
Thierry Strudel3d639192016-09-09 11:52:26 -07002955 mCurJpegMeta.clear();
2956 //Get min frame duration for this streams configuration
2957 deriveMinFrameDuration();
2958
Chien-Yu Chenee335912017-02-09 17:53:20 -08002959 mFirstPreviewIntentSeen = false;
2960
2961     // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002962 {
2963 Mutex::Autolock l(gHdrPlusClientLock);
2964 disableHdrPlusModeLocked();
2965 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002966
Thierry Strudel3d639192016-09-09 11:52:26 -07002967 // Update state
2968 mState = CONFIGURED;
2969
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002970 mFirstMetadataCallback = true;
2971
Thierry Strudel3d639192016-09-09 11:52:26 -07002972 pthread_mutex_unlock(&mMutex);
2973
2974 return rc;
2975}
2976
2977/*===========================================================================
2978 * FUNCTION : validateCaptureRequest
2979 *
2980 * DESCRIPTION: validate a capture request from camera service
2981 *
2982 * PARAMETERS :
2983 * @request : request from framework to process
2984 *
2985 * RETURN :
2986 *
2987 *==========================================================================*/
2988int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002989 camera3_capture_request_t *request,
2990 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002991{
2992 ssize_t idx = 0;
2993 const camera3_stream_buffer_t *b;
2994 CameraMetadata meta;
2995
2996 /* Sanity check the request */
2997 if (request == NULL) {
2998 LOGE("NULL capture request");
2999 return BAD_VALUE;
3000 }
3001
3002 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3003 /*settings cannot be null for the first request*/
3004 return BAD_VALUE;
3005 }
3006
3007 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003008 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3009 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003010         LOGE("Request %d: No output buffers provided!",
3011                 frameNumber);
3012 return BAD_VALUE;
3013 }
3014 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3015         LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3016 request->num_output_buffers, MAX_NUM_STREAMS);
3017 return BAD_VALUE;
3018 }
3019 if (request->input_buffer != NULL) {
3020 b = request->input_buffer;
3021 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3022 LOGE("Request %d: Buffer %ld: Status not OK!",
3023 frameNumber, (long)idx);
3024 return BAD_VALUE;
3025 }
3026 if (b->release_fence != -1) {
3027 LOGE("Request %d: Buffer %ld: Has a release fence!",
3028 frameNumber, (long)idx);
3029 return BAD_VALUE;
3030 }
3031 if (b->buffer == NULL) {
3032 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3033 frameNumber, (long)idx);
3034 return BAD_VALUE;
3035 }
3036 }
3037
3038 // Validate all buffers
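    // Every output buffer must map to a configured stream (non-NULL priv), have OK
    // status, carry no release fence, and provide a non-NULL buffer handle.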
3039 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003040 if (b == NULL) {
3041 return BAD_VALUE;
3042 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003043 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003044 QCamera3ProcessingChannel *channel =
3045 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3046 if (channel == NULL) {
3047 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3048 frameNumber, (long)idx);
3049 return BAD_VALUE;
3050 }
3051 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3052 LOGE("Request %d: Buffer %ld: Status not OK!",
3053 frameNumber, (long)idx);
3054 return BAD_VALUE;
3055 }
3056 if (b->release_fence != -1) {
3057 LOGE("Request %d: Buffer %ld: Has a release fence!",
3058 frameNumber, (long)idx);
3059 return BAD_VALUE;
3060 }
3061 if (b->buffer == NULL) {
3062 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3063 frameNumber, (long)idx);
3064 return BAD_VALUE;
3065 }
3066 if (*(b->buffer) == NULL) {
3067 LOGE("Request %d: Buffer %ld: NULL private handle!",
3068 frameNumber, (long)idx);
3069 return BAD_VALUE;
3070 }
3071 idx++;
3072 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003073 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003074 return NO_ERROR;
3075}
3076
3077/*===========================================================================
3078 * FUNCTION : deriveMinFrameDuration
3079 *
3080 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3081 * on currently configured streams.
3082 *
3083 * PARAMETERS : NONE
3084 *
3085 * RETURN : NONE
3086 *
3087 *==========================================================================*/
3088void QCamera3HardwareInterface::deriveMinFrameDuration()
3089{
3090 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3091
3092 maxJpegDim = 0;
3093 maxProcessedDim = 0;
3094 maxRawDim = 0;
3095
3096 // Figure out maximum jpeg, processed, and raw dimensions
3097 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3098 it != mStreamInfo.end(); it++) {
3099
3100 // Input stream doesn't have valid stream_type
3101 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3102 continue;
3103
3104 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3105 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3106 if (dimension > maxJpegDim)
3107 maxJpegDim = dimension;
3108 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3109 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3110 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3111 if (dimension > maxRawDim)
3112 maxRawDim = dimension;
3113 } else {
3114 if (dimension > maxProcessedDim)
3115 maxProcessedDim = dimension;
3116 }
3117 }
3118
3119 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3120 MAX_SIZES_CNT);
3121
3122 //Assume all jpeg dimensions are in processed dimensions.
3123 if (maxJpegDim > maxProcessedDim)
3124 maxProcessedDim = maxJpegDim;
3125 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3126 if (maxProcessedDim > maxRawDim) {
3127 maxRawDim = INT32_MAX;
3128
3129 for (size_t i = 0; i < count; i++) {
3130 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3131 gCamCapability[mCameraId]->raw_dim[i].height;
3132 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3133 maxRawDim = dimension;
3134 }
3135 }
3136
3137 //Find minimum durations for processed, jpeg, and raw
3138 for (size_t i = 0; i < count; i++) {
3139 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3140 gCamCapability[mCameraId]->raw_dim[i].height) {
3141 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3142 break;
3143 }
3144 }
3145 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3146 for (size_t i = 0; i < count; i++) {
3147 if (maxProcessedDim ==
3148 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3149 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3150 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3151 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3152 break;
3153 }
3154 }
3155}
3156
3157/*===========================================================================
3158 * FUNCTION : getMinFrameDuration
3159 *
3160 * DESCRIPTION: get the minimum frame duration based on the currently derived minimum frame durations
3161 * and current request configuration.
3162 *
3163 * PARAMETERS : @request: request sent by the frameworks
3164 *
3165 * RETURN : min frame duration for a particular request
3166 *
3167 *==========================================================================*/
3168int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3169{
3170 bool hasJpegStream = false;
3171 bool hasRawStream = false;
3172 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3173 const camera3_stream_t *stream = request->output_buffers[i].stream;
3174 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3175 hasJpegStream = true;
3176 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3177 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3178 stream->format == HAL_PIXEL_FORMAT_RAW16)
3179 hasRawStream = true;
3180 }
3181
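    // The minimum duration for a request is the larger of the RAW and processed minimums;
    // requests that include a JPEG stream additionally respect the JPEG minimum.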
3182 if (!hasJpegStream)
3183 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3184 else
3185 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3186}
3187
3188/*===========================================================================
3189 * FUNCTION : handleBuffersDuringFlushLock
3190 *
3191 * DESCRIPTION: Account for buffers returned from back-end during flush
3192 * This function is executed while mMutex is held by the caller.
3193 *
3194 * PARAMETERS :
3195 * @buffer: image buffer for the callback
3196 *
3197 * RETURN :
3198 *==========================================================================*/
3199void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3200{
3201 bool buffer_found = false;
3202 for (List<PendingBuffersInRequest>::iterator req =
3203 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3204 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3205 for (List<PendingBufferInfo>::iterator i =
3206 req->mPendingBufferList.begin();
3207 i != req->mPendingBufferList.end(); i++) {
3208 if (i->buffer == buffer->buffer) {
3209 mPendingBuffersMap.numPendingBufsAtFlush--;
3210 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3211 buffer->buffer, req->frame_number,
3212 mPendingBuffersMap.numPendingBufsAtFlush);
3213 buffer_found = true;
3214 break;
3215 }
3216 }
3217 if (buffer_found) {
3218 break;
3219 }
3220 }
3221 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3222 //signal the flush()
3223 LOGD("All buffers returned to HAL. Continue flush");
3224 pthread_cond_signal(&mBuffersCond);
3225 }
3226}
3227
Thierry Strudel3d639192016-09-09 11:52:26 -07003228/*===========================================================================
3229 * FUNCTION : handleBatchMetadata
3230 *
3231 * DESCRIPTION: Handles metadata buffer callback in batch mode
3232 *
3233 * PARAMETERS : @metadata_buf: metadata buffer
3234 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3235 * the meta buf in this method
3236 *
3237 * RETURN :
3238 *
3239 *==========================================================================*/
3240void QCamera3HardwareInterface::handleBatchMetadata(
3241 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3242{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003243 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003244
3245 if (NULL == metadata_buf) {
3246 LOGE("metadata_buf is NULL");
3247 return;
3248 }
3249     /* In batch mode, the metadata will contain the frame number and timestamp of
3250 * the last frame in the batch. Eg: a batch containing buffers from request
3251 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3252      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3253 * multiple process_capture_results */
3254 metadata_buffer_t *metadata =
3255 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3256 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3257 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3258 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3259 uint32_t frame_number = 0, urgent_frame_number = 0;
3260 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3261 bool invalid_metadata = false;
3262 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3263 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003264 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003265
3266 int32_t *p_frame_number_valid =
3267 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3268 uint32_t *p_frame_number =
3269 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3270 int64_t *p_capture_time =
3271 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3272 int32_t *p_urgent_frame_number_valid =
3273 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3274 uint32_t *p_urgent_frame_number =
3275 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3276
3277 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3278 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3279 (NULL == p_urgent_frame_number)) {
3280 LOGE("Invalid metadata");
3281 invalid_metadata = true;
3282 } else {
3283 frame_number_valid = *p_frame_number_valid;
3284 last_frame_number = *p_frame_number;
3285 last_frame_capture_time = *p_capture_time;
3286 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3287 last_urgent_frame_number = *p_urgent_frame_number;
3288 }
3289
3290 /* In batchmode, when no video buffers are requested, set_parms are sent
3291 * for every capture_request. The difference between consecutive urgent
3292 * frame numbers and frame numbers should be used to interpolate the
3293 * corresponding frame numbers and time stamps */
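     /* Example: for a batch of four requests 5..8, the metadata reports frame number 8;
      * first_frame_number resolves to 5, frameNumDiff becomes 4, and results for frames
      * 5, 6, 7 and 8 are emitted with timestamps spaced 1/mHFRVideoFps seconds apart. */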
3294 pthread_mutex_lock(&mMutex);
3295 if (urgent_frame_number_valid) {
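        // mPendingBatchMap is keyed by the last frame number in a batch and stores the
        // first frame number of that batch, which anchors the interpolation below.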
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003296 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3297 if(idx < 0) {
3298 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3299 last_urgent_frame_number);
3300 mState = ERROR;
3301 pthread_mutex_unlock(&mMutex);
3302 return;
3303 }
3304 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003305 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3306 first_urgent_frame_number;
3307
3308 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3309 urgent_frame_number_valid,
3310 first_urgent_frame_number, last_urgent_frame_number);
3311 }
3312
3313 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003314 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3315 if(idx < 0) {
3316 LOGE("Invalid frame number received: %d. Irrecoverable error",
3317 last_frame_number);
3318 mState = ERROR;
3319 pthread_mutex_unlock(&mMutex);
3320 return;
3321 }
3322 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003323 frameNumDiff = last_frame_number + 1 -
3324 first_frame_number;
3325 mPendingBatchMap.removeItem(last_frame_number);
3326
3327 LOGD("frm: valid: %d frm_num: %d - %d",
3328 frame_number_valid,
3329 first_frame_number, last_frame_number);
3330
3331 }
3332 pthread_mutex_unlock(&mMutex);
3333
3334 if (urgent_frame_number_valid || frame_number_valid) {
3335 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3336 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3337 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3338 urgentFrameNumDiff, last_urgent_frame_number);
3339 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3340 LOGE("frameNumDiff: %d frameNum: %d",
3341 frameNumDiff, last_frame_number);
3342 }
3343
3344 for (size_t i = 0; i < loopCount; i++) {
3345 /* handleMetadataWithLock is called even for invalid_metadata for
3346 * pipeline depth calculation */
3347 if (!invalid_metadata) {
3348 /* Infer frame number. Batch metadata contains frame number of the
3349 * last frame */
3350 if (urgent_frame_number_valid) {
3351 if (i < urgentFrameNumDiff) {
3352 urgent_frame_number =
3353 first_urgent_frame_number + i;
3354 LOGD("inferred urgent frame_number: %d",
3355 urgent_frame_number);
3356 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3357 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3358 } else {
3359 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3360 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3361 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3362 }
3363 }
3364
3365 /* Infer frame number. Batch metadata contains frame number of the
3366 * last frame */
3367 if (frame_number_valid) {
3368 if (i < frameNumDiff) {
3369 frame_number = first_frame_number + i;
3370 LOGD("inferred frame_number: %d", frame_number);
3371 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3372 CAM_INTF_META_FRAME_NUMBER, frame_number);
3373 } else {
3374 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3375 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3376 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3377 }
3378 }
3379
3380 if (last_frame_capture_time) {
3381 //Infer timestamp
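                // The i-th frame in the batch is stamped with:
                // last_frame_capture_time - (loopCount - 1 - i) * NSEC_PER_SEC / mHFRVideoFps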
3382 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003383 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003384 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003385 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003386 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3387 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3388 LOGD("batch capture_time: %lld, capture_time: %lld",
3389 last_frame_capture_time, capture_time);
3390 }
3391 }
3392 pthread_mutex_lock(&mMutex);
3393 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003394 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003395 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3396 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003397                 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 pthread_mutex_unlock(&mMutex);
3399 }
3400
3401 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003402 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003403 mMetadataChannel->bufDone(metadata_buf);
3404 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003405 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003406 }
3407}
3408
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003409void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3410 camera3_error_msg_code_t errorCode)
3411{
3412 camera3_notify_msg_t notify_msg;
3413 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3414 notify_msg.type = CAMERA3_MSG_ERROR;
3415 notify_msg.message.error.error_code = errorCode;
3416 notify_msg.message.error.error_stream = NULL;
3417 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003418 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003419
3420 return;
3421}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003422
3423/*===========================================================================
3424 * FUNCTION : sendPartialMetadataWithLock
3425 *
3426 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3427 *
3428 * PARAMETERS : @metadata: metadata buffer
3429 * @requestIter: The iterator for the pending capture request for
3430 * which the partial result is being sen
3431 * which the partial result is being sent
3432 * last urgent metadata in a batch. Always true for non-batch mode
3433 *
3434 * RETURN :
3435 *
3436 *==========================================================================*/
3437
3438void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3439 metadata_buffer_t *metadata,
3440 const pendingRequestIterator requestIter,
3441 bool lastUrgentMetadataInBatch)
3442{
3443 camera3_capture_result_t result;
3444 memset(&result, 0, sizeof(camera3_capture_result_t));
3445
3446 requestIter->partial_result_cnt++;
3447
3448 // Extract 3A metadata
3449 result.result = translateCbUrgentMetadataToResultMetadata(
3450 metadata, lastUrgentMetadataInBatch);
3451 // Populate metadata result
3452 result.frame_number = requestIter->frame_number;
3453 result.num_output_buffers = 0;
3454 result.output_buffers = NULL;
3455 result.partial_result = requestIter->partial_result_cnt;
3456
3457 {
3458 Mutex::Autolock l(gHdrPlusClientLock);
3459 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3460 // Notify HDR+ client about the partial metadata.
3461 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3462 result.partial_result == PARTIAL_RESULT_COUNT);
3463 }
3464 }
3465
3466 orchestrateResult(&result);
3467 LOGD("urgent frame_number = %u", result.frame_number);
3468 free_camera_metadata((camera_metadata_t *)result.result);
3469}
3470
Thierry Strudel3d639192016-09-09 11:52:26 -07003471/*===========================================================================
3472 * FUNCTION : handleMetadataWithLock
3473 *
3474 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3475 *
3476 * PARAMETERS : @metadata_buf: metadata buffer
3477 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3478 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003479 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3480 * last urgent metadata in a batch. Always true for non-batch mode
3481 * @lastMetadataInBatch: Boolean to indicate whether this is the
3482 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003483 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3484 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 *
3486 * RETURN :
3487 *
3488 *==========================================================================*/
3489void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003490 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003491 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3492 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003493{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003494 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3496 //during flush do not send metadata from this thread
3497 LOGD("not sending metadata during flush or when mState is error");
3498 if (free_and_bufdone_meta_buf) {
3499 mMetadataChannel->bufDone(metadata_buf);
3500 free(metadata_buf);
3501 }
3502 return;
3503 }
3504
3505 //not in flush
3506 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3507 int32_t frame_number_valid, urgent_frame_number_valid;
3508 uint32_t frame_number, urgent_frame_number;
3509 int64_t capture_time;
3510 nsecs_t currentSysTime;
3511
3512 int32_t *p_frame_number_valid =
3513 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3514 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3515 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3516 int32_t *p_urgent_frame_number_valid =
3517 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3518 uint32_t *p_urgent_frame_number =
3519 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3520 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3521 metadata) {
3522 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3523 *p_frame_number_valid, *p_frame_number);
3524 }
3525
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003526 camera_metadata_t *resultMetadata = nullptr;
3527
Thierry Strudel3d639192016-09-09 11:52:26 -07003528 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3529 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3530 LOGE("Invalid metadata");
3531 if (free_and_bufdone_meta_buf) {
3532 mMetadataChannel->bufDone(metadata_buf);
3533 free(metadata_buf);
3534 }
3535 goto done_metadata;
3536 }
3537 frame_number_valid = *p_frame_number_valid;
3538 frame_number = *p_frame_number;
3539 capture_time = *p_capture_time;
3540 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3541 urgent_frame_number = *p_urgent_frame_number;
3542 currentSysTime = systemTime(CLOCK_MONOTONIC);
3543
3544 // Detect if buffers from any requests are overdue
3545 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003546 int64_t timeout;
3547 {
3548 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3549 // If there is a pending HDR+ request, the following requests may be blocked until the
3550 // HDR+ request is done. So allow a longer timeout.
3551 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3552 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3553 }
3554
3555 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003556 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003557 assert(missed.stream->priv);
3558 if (missed.stream->priv) {
3559 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3560 assert(ch->mStreams[0]);
3561 if (ch->mStreams[0]) {
3562                         LOGE("Cancel missing frame = %d, buffer = %p, "
3563 "stream type = %d, stream format = %d",
3564 req.frame_number, missed.buffer,
3565 ch->mStreams[0]->getMyType(), missed.stream->format);
3566 ch->timeoutFrame(req.frame_number);
3567 }
3568 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003569 }
3570 }
3571 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003572     //For the very first metadata callback, regardless of whether it contains a valid
3573 //frame number, send the partial metadata for the jumpstarting requests.
3574 //Note that this has to be done even if the metadata doesn't contain valid
3575 //urgent frame number, because in the case only 1 request is ever submitted
3576 //to HAL, there won't be subsequent valid urgent frame number.
3577 if (mFirstMetadataCallback) {
3578 for (pendingRequestIterator i =
3579 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3580 if (i->bUseFirstPartial) {
3581 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3582 }
3583 }
3584 mFirstMetadataCallback = false;
3585 }
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 //Partial result on process_capture_result for timestamp
3588 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003589 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003590
3591         //Received an urgent Frame Number, handle it
3592 //using partial results
3593 for (pendingRequestIterator i =
3594 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3595 LOGD("Iterator Frame = %d urgent frame = %d",
3596 i->frame_number, urgent_frame_number);
3597
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003598 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003599 (i->partial_result_cnt == 0)) {
3600 LOGE("Error: HAL missed urgent metadata for frame number %d",
3601 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003602 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003603 }
3604
3605 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003606 i->partial_result_cnt == 0) {
3607 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003608 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3609 // Instant AEC settled for this frame.
3610 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3611 mInstantAECSettledFrameNumber = urgent_frame_number;
3612 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003613 break;
3614 }
3615 }
3616 }
3617
3618 if (!frame_number_valid) {
3619 LOGD("Not a valid normal frame number, used as SOF only");
3620 if (free_and_bufdone_meta_buf) {
3621 mMetadataChannel->bufDone(metadata_buf);
3622 free(metadata_buf);
3623 }
3624 goto done_metadata;
3625 }
3626 LOGH("valid frame_number = %u, capture_time = %lld",
3627 frame_number, capture_time);
3628
Emilian Peev7650c122017-01-19 08:24:33 -08003629 if (metadata->is_depth_data_valid) {
3630 handleDepthDataLocked(metadata->depth_data, frame_number);
3631 }
3632
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003633 // Check whether any stream buffer corresponding to this is dropped or not
3634 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3635     // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3636 for (auto & pendingRequest : mPendingRequestsList) {
3637 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3638 mInstantAECSettledFrameNumber)) {
3639 camera3_notify_msg_t notify_msg = {};
3640 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003641 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003642 QCamera3ProcessingChannel *channel =
3643 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003644 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003645 if (p_cam_frame_drop) {
3646 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003647 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003648 // Got the stream ID for drop frame.
3649 dropFrame = true;
3650 break;
3651 }
3652 }
3653 } else {
3654 // This is instant AEC case.
3655                     // This is the instant AEC case.
3656                     // For instant AEC, drop the stream until AEC is settled.
3657 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003658
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003659 if (dropFrame) {
3660 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3661 if (p_cam_frame_drop) {
3662 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003663 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003664 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003665 } else {
3666 // For instant AEC, inform frame drop and frame number
3667 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3668 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003669 pendingRequest.frame_number, streamID,
3670 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003671 }
3672 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003673 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003674                 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003675 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003676 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003677 if (p_cam_frame_drop) {
3678 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003679 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003681 } else {
3682 // For instant AEC, inform frame drop and frame number
3683 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3684 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003685 pendingRequest.frame_number, streamID,
3686 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003687 }
3688 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003689 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003690 PendingFrameDrop.stream_ID = streamID;
3691 // Add the Frame drop info to mPendingFrameDropList
3692 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003693 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003694 }
3695 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003697
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003698 for (auto & pendingRequest : mPendingRequestsList) {
3699 // Find the pending request with the frame number.
3700 if (pendingRequest.frame_number == frame_number) {
3701 // Update the sensor timestamp.
3702 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003703
Thierry Strudel3d639192016-09-09 11:52:26 -07003704
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003705 /* Set the timestamp in display metadata so that clients aware of
3706                private_handle such as VT can use these unmodified timestamps.
3707 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003708 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003709
Thierry Strudel3d639192016-09-09 11:52:26 -07003710 // Find channel requiring metadata, meaning internal offline postprocess
3711 // is needed.
3712 //TODO: for now, we don't support two streams requiring metadata at the same time.
3713             // (because we are not making copies, and the metadata buffer is not reference counted).
3714 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003715 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3716 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003717 if (iter->need_metadata) {
3718 internalPproc = true;
3719 QCamera3ProcessingChannel *channel =
3720 (QCamera3ProcessingChannel *)iter->stream->priv;
3721 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003722 if(p_is_metabuf_queued != NULL) {
3723 *p_is_metabuf_queued = true;
3724 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 break;
3726 }
3727 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003728 for (auto itr = pendingRequest.internalRequestList.begin();
3729 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003730 if (itr->need_metadata) {
3731 internalPproc = true;
3732 QCamera3ProcessingChannel *channel =
3733 (QCamera3ProcessingChannel *)itr->stream->priv;
3734 channel->queueReprocMetadata(metadata_buf);
3735 break;
3736 }
3737 }
3738
Thierry Strudel54dc9782017-02-15 12:12:10 -08003739 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003740
3741 bool *enableZsl = nullptr;
3742 if (gExposeEnableZslKey) {
3743 enableZsl = &pendingRequest.enableZsl;
3744 }
3745
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003746 resultMetadata = translateFromHalMetadata(metadata,
3747 pendingRequest.timestamp, pendingRequest.request_id,
3748 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3749 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003750 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003751 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003752 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003753 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003755 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003756
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003758
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003759 if (pendingRequest.blob_request) {
3760 //Dump tuning metadata if enabled and available
3761 char prop[PROPERTY_VALUE_MAX];
3762 memset(prop, 0, sizeof(prop));
3763 property_get("persist.camera.dumpmetadata", prop, "0");
3764 int32_t enabled = atoi(prop);
3765 if (enabled && metadata->is_tuning_params_valid) {
3766 dumpMetadataToFile(metadata->tuning_params,
3767 mMetaFrameCount,
3768 enabled,
3769 "Snapshot",
3770 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003771 }
3772 }
3773
3774 if (!internalPproc) {
3775 LOGD("couldn't find need_metadata for this metadata");
3776 // Return metadata buffer
3777 if (free_and_bufdone_meta_buf) {
3778 mMetadataChannel->bufDone(metadata_buf);
3779 free(metadata_buf);
3780 }
3781 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003782
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003783 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003784 }
3785 }
3786
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003787 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3788
3789 // Try to send out capture result metadata.
3790 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 return;
3792
Thierry Strudel3d639192016-09-09 11:52:26 -07003793done_metadata:
3794 for (pendingRequestIterator i = mPendingRequestsList.begin();
3795 i != mPendingRequestsList.end() ;i++) {
3796 i->pipeline_depth++;
3797 }
3798 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3799 unblockRequestIfNecessary();
3800}
3801
3802/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003803 * FUNCTION : handleDepthDataLocked
3804 *
3805 * DESCRIPTION: Handles incoming depth data
3806 *
3807 * PARAMETERS : @depthData : Depth data
3808 * @frameNumber: Frame number of the incoming depth data
3809 *
3810 * RETURN :
3811 *
3812 *==========================================================================*/
3813void QCamera3HardwareInterface::handleDepthDataLocked(
3814 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3815 uint32_t currentFrameNumber;
3816 buffer_handle_t *depthBuffer;
3817
3818 if (nullptr == mDepthChannel) {
3819 LOGE("Depth channel not present!");
3820 return;
3821 }
3822
3823 camera3_stream_buffer_t resultBuffer =
3824 {.acquire_fence = -1,
3825 .release_fence = -1,
3826 .status = CAMERA3_BUFFER_STATUS_OK,
3827 .buffer = nullptr,
3828 .stream = mDepthChannel->getStream()};
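    // Drain queued depth buffers oldest-first: buffers older than the incoming frame
    // are returned as errors, the matching frame is populated with depth data, and
    // anything newer stays queued for a later callback.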
Emilian Peev7650c122017-01-19 08:24:33 -08003829 do {
3830 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3831 if (nullptr == depthBuffer) {
3832 break;
3833 }
3834
Emilian Peev7650c122017-01-19 08:24:33 -08003835 resultBuffer.buffer = depthBuffer;
3836 if (currentFrameNumber == frameNumber) {
3837 int32_t rc = mDepthChannel->populateDepthData(depthData,
3838 frameNumber);
3839 if (NO_ERROR != rc) {
3840 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3841 } else {
3842 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3843 }
3844 } else if (currentFrameNumber > frameNumber) {
3845 break;
3846 } else {
3847 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3848 {{currentFrameNumber, mDepthChannel->getStream(),
3849 CAMERA3_MSG_ERROR_BUFFER}}};
3850 orchestrateNotify(&notify_msg);
3851
3852                     LOGE("Depth buffer for frame number: %d is missing, "
3853 "returning back!", currentFrameNumber);
3854 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3855 }
3856 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003857 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003858 } while (currentFrameNumber < frameNumber);
3859}
3860
3861/*===========================================================================
3862 * FUNCTION : notifyErrorFoPendingDepthData
3863 *
3864 * DESCRIPTION: Returns error for any pending depth buffers
3865 *
3866 * PARAMETERS : depthCh - depth channel that needs to get flushed
3867 *
3868 * RETURN :
3869 *
3870 *==========================================================================*/
3871void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3872 QCamera3DepthChannel *depthCh) {
3873 uint32_t currentFrameNumber;
3874 buffer_handle_t *depthBuffer;
3875
3876 if (nullptr == depthCh) {
3877 return;
3878 }
3879
3880 camera3_notify_msg_t notify_msg =
3881 {.type = CAMERA3_MSG_ERROR,
3882 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3883 camera3_stream_buffer_t resultBuffer =
3884 {.acquire_fence = -1,
3885 .release_fence = -1,
3886 .buffer = nullptr,
3887 .stream = depthCh->getStream(),
3888 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003889
3890 while (nullptr !=
3891 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3892 depthCh->unmapBuffer(currentFrameNumber);
3893
3894 notify_msg.message.error.frame_number = currentFrameNumber;
3895 orchestrateNotify(&notify_msg);
3896
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003897 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003898 };
3899}
3900
3901/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 * FUNCTION : hdrPlusPerfLock
3903 *
3904 * DESCRIPTION: perf lock for HDR+ using custom intent
3905 *
3906 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3907 *
3908 * RETURN : None
3909 *
3910 *==========================================================================*/
3911void QCamera3HardwareInterface::hdrPlusPerfLock(
3912 mm_camera_super_buf_t *metadata_buf)
3913{
3914 if (NULL == metadata_buf) {
3915 LOGE("metadata_buf is NULL");
3916 return;
3917 }
3918 metadata_buffer_t *metadata =
3919 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3920 int32_t *p_frame_number_valid =
3921 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3922 uint32_t *p_frame_number =
3923 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3924
3925 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3926 LOGE("%s: Invalid metadata", __func__);
3927 return;
3928 }
3929
3930 //acquire perf lock for 5 sec after the last HDR frame is captured
3931 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3932 if ((p_frame_number != NULL) &&
3933 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003934 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003935 }
3936 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003937}
3938
3939/*===========================================================================
3940 * FUNCTION : handleInputBufferWithLock
3941 *
3942 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3943 *
3944 * PARAMETERS : @frame_number: frame number of the input buffer
3945 *
3946 * RETURN :
3947 *
3948 *==========================================================================*/
3949void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3950{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003951 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003952 pendingRequestIterator i = mPendingRequestsList.begin();
3953 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3954 i++;
3955 }
3956 if (i != mPendingRequestsList.end() && i->input_buffer) {
3957 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003958 CameraMetadata settings;
3959 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3960 if(i->settings) {
3961 settings = i->settings;
3962 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3963 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07003964 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003965 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07003966 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003967 } else {
3968 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07003969 }
3970
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003971 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3972 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3973 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07003974
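        // Send the reprocess result: the input settings are echoed back as
        // the result metadata, together with the original input buffer.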
3975 camera3_capture_result result;
3976 memset(&result, 0, sizeof(camera3_capture_result));
3977 result.frame_number = frame_number;
3978 result.result = i->settings;
3979 result.input_buffer = i->input_buffer;
3980 result.partial_result = PARTIAL_RESULT_COUNT;
3981
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003982 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003983 LOGD("Input request metadata and input buffer frame_number = %u",
3984 i->frame_number);
3985 i = erasePendingRequest(i);
3986 } else {
3987 LOGE("Could not find input request for frame number %d", frame_number);
3988 }
3989}
3990
3991/*===========================================================================
3992 * FUNCTION : handleBufferWithLock
3993 *
3994 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3995 *
3996 * PARAMETERS : @buffer: image buffer for the callback
3997 * @frame_number: frame number of the image buffer
3998 *
3999 * RETURN :
4000 *
4001 *==========================================================================*/
4002void QCamera3HardwareInterface::handleBufferWithLock(
4003 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4004{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004005 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004006
4007 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4008 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4009 }
4010
Thierry Strudel3d639192016-09-09 11:52:26 -07004011 /* Nothing to be done during error state */
4012 if ((ERROR == mState) || (DEINIT == mState)) {
4013 return;
4014 }
4015 if (mFlushPerf) {
4016 handleBuffersDuringFlushLock(buffer);
4017 return;
4018 }
4019 //not in flush
4020 // If the frame number doesn't exist in the pending request list,
4021    // Look up the pending request for this frame number. For a reprocess
4022    // request (one with an input buffer) the result metadata is sent out here;
4023    // the buffer itself is always handed to the output buffer dispatcher below.
4024 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4025 i++;
4026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004027
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004028 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004029 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004030 // For a reprocessing request, try to send out result metadata.
4031 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004033 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004034
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004035 // Check if this frame was dropped.
4036 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4037 m != mPendingFrameDropList.end(); m++) {
4038 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4039 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4040 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4041 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4042 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4043 frame_number, streamID);
4044 m = mPendingFrameDropList.erase(m);
4045 break;
4046 }
4047 }
4048
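    // Merge in any error status previously recorded for this buffer in the
    // pending buffers map.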
4049 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4050 LOGH("result frame_number = %d, buffer = %p",
4051 frame_number, buffer->buffer);
4052
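    // Remove the buffer from pending tracking and hand it to the output
    // buffer dispatcher for in-order delivery to the framework.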
4053 mPendingBuffersMap.removeBuf(buffer->buffer);
4054 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4055
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004056 if (mPreviewStarted == false) {
4057 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4058 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004059 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4060
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004061 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4062 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4063 mPreviewStarted = true;
4064
4065 // Set power hint for preview
4066 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4067 }
4068 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004069}
4070
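/*===========================================================================
 * FUNCTION : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Updates the result metadata of a pending request and sends out
 *              all result metadata that are ready, in frame number order,
 *              with mMutex lock held.
 *
 * PARAMETERS : @frameNumber: frame number the result metadata belongs to
 *              @resultMetadata: result metadata for the frame number
 *
 * RETURN :
 *
 *==========================================================================*/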
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004071void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004072 const camera_metadata_t *resultMetadata)
4073{
4074 // Find the pending request for this result metadata.
4075 auto requestIter = mPendingRequestsList.begin();
4076 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4077 requestIter++;
4078 }
4079
4080 if (requestIter == mPendingRequestsList.end()) {
4081 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4082 return;
4083 }
4084
4085 // Update the result metadata
4086 requestIter->resultMetadata = resultMetadata;
4087
4088 // Check what type of request this is.
4089 bool liveRequest = false;
4090 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004091 // HDR+ request doesn't have partial results.
4092 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004093 } else if (requestIter->input_buffer != nullptr) {
4094 // Reprocessing request result is the same as settings.
4095 requestIter->resultMetadata = requestIter->settings;
4096 // Reprocessing request doesn't have partial results.
4097 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4098 } else {
4099 liveRequest = true;
4100 requestIter->partial_result_cnt++;
4101 mPendingLiveRequest--;
4102
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004103 {
4104 Mutex::Autolock l(gHdrPlusClientLock);
4105 // For a live request, send the metadata to HDR+ client.
4106 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4107 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4108 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4109 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004110 }
4111 }
4112
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004113 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4114 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004115 bool readyToSend = true;
4116
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004117 // Iterate through the pending requests to send out result metadata that are ready. Also if
4118 // this result metadata belongs to a live request, notify errors for previous live requests
4119 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004120 auto iter = mPendingRequestsList.begin();
4121 while (iter != mPendingRequestsList.end()) {
4122 // Check if current pending request is ready. If it's not ready, the following pending
4123 // requests are also not ready.
4124 if (readyToSend && iter->resultMetadata == nullptr) {
4125 readyToSend = false;
4126 }
4127
4128 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4129
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004130 camera3_capture_result_t result = {};
4131 result.frame_number = iter->frame_number;
4132 result.result = iter->resultMetadata;
4133 result.partial_result = iter->partial_result_cnt;
4134
4135 // If this pending buffer has result metadata, we may be able to send out shutter callback
4136 // and result metadata.
4137 if (iter->resultMetadata != nullptr) {
4138 if (!readyToSend) {
4139 // If any of the previous pending request is not ready, this pending request is
4140 // also not ready to send in order to keep shutter callbacks and result metadata
4141 // in order.
4142 iter++;
4143 continue;
4144 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004145 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4146 // If the result metadata belongs to a live request, notify errors for previous pending
4147 // live requests.
4148 mPendingLiveRequest--;
4149
4150 CameraMetadata dummyMetadata;
4151 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4152 result.result = dummyMetadata.release();
4153
4154 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004155
4156 // partial_result should be PARTIAL_RESULT_CNT in case of
4157 // ERROR_RESULT.
4158 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4159 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004160 } else {
4161 iter++;
4162 continue;
4163 }
4164
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004165 result.output_buffers = nullptr;
4166 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004167 orchestrateResult(&result);
4168
4169 // For reprocessing, result metadata is the same as settings so do not free it here to
4170 // avoid double free.
4171 if (result.result != iter->settings) {
4172 free_camera_metadata((camera_metadata_t *)result.result);
4173 }
4174 iter->resultMetadata = nullptr;
4175 iter = erasePendingRequest(iter);
4176 }
4177
4178 if (liveRequest) {
4179 for (auto &iter : mPendingRequestsList) {
4180 // Increment pipeline depth for the following pending requests.
4181 if (iter.frame_number > frameNumber) {
4182 iter.pipeline_depth++;
4183 }
4184 }
4185 }
4186
4187 unblockRequestIfNecessary();
4188}
4189
Thierry Strudel3d639192016-09-09 11:52:26 -07004190/*===========================================================================
4191 * FUNCTION : unblockRequestIfNecessary
4192 *
4193 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4194 * that mMutex is held when this function is called.
4195 *
4196 * PARAMETERS :
4197 *
4198 * RETURN :
4199 *
4200 *==========================================================================*/
4201void QCamera3HardwareInterface::unblockRequestIfNecessary()
4202{
4203 // Unblock process_capture_request
4204 pthread_cond_signal(&mRequestCond);
4205}
4206
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004207/*===========================================================================
4208 * FUNCTION : isHdrSnapshotRequest
4209 *
4210 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4211 *
4212 * PARAMETERS : camera3 request structure
4213 *
4214 * RETURN : boolean decision variable
4215 *
4216 *==========================================================================*/
4217bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4218{
4219 if (request == NULL) {
4220 LOGE("Invalid request handle");
4221 assert(0);
4222 return false;
4223 }
4224
4225 if (!mForceHdrSnapshot) {
4226 CameraMetadata frame_settings;
4227 frame_settings = request->settings;
4228
4229 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4230 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4231 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4232 return false;
4233 }
4234 } else {
4235 return false;
4236 }
4237
4238 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4239 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4240 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4241 return false;
4242 }
4243 } else {
4244 return false;
4245 }
4246 }
4247
4248 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4249 if (request->output_buffers[i].stream->format
4250 == HAL_PIXEL_FORMAT_BLOB) {
4251 return true;
4252 }
4253 }
4254
4255 return false;
4256}
4257/*===========================================================================
4258 * FUNCTION : orchestrateRequest
4259 *
4260 * DESCRIPTION: Orchestrates a capture request from camera service
4261 *
4262 * PARAMETERS :
4263 * @request : request from framework to process
4264 *
4265 * RETURN : Error status codes
4266 *
4267 *==========================================================================*/
4268int32_t QCamera3HardwareInterface::orchestrateRequest(
4269 camera3_capture_request_t *request)
4270{
4271
4272 uint32_t originalFrameNumber = request->frame_number;
4273 uint32_t originalOutputCount = request->num_output_buffers;
4274 const camera_metadata_t *original_settings = request->settings;
4275 List<InternalRequest> internallyRequestedStreams;
4276 List<InternalRequest> emptyInternalList;
4277
4278 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4279 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4280 uint32_t internalFrameNumber;
4281 CameraMetadata modified_meta;
4282
4283
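        /* The HDR snapshot is implemented as a sequence of requests: for each
         * exposure compensation step (-2x, 0x, +2x, with AE locked) a
         * metering-only request lets AE settle, followed by a capture at that
         * step. The framework's own output buffers are attached to the first
         * capture; the remaining captures use the internally requested blob
         * stream and are tracked under internal frame numbers. */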
4284 /* Add Blob channel to list of internally requested streams */
4285 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4286 if (request->output_buffers[i].stream->format
4287 == HAL_PIXEL_FORMAT_BLOB) {
4288 InternalRequest streamRequested;
4289 streamRequested.meteringOnly = 1;
4290 streamRequested.need_metadata = 0;
4291 streamRequested.stream = request->output_buffers[i].stream;
4292 internallyRequestedStreams.push_back(streamRequested);
4293 }
4294 }
4295 request->num_output_buffers = 0;
4296 auto itr = internallyRequestedStreams.begin();
4297
4298 /* Modify setting to set compensation */
4299 modified_meta = request->settings;
4300 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4301 uint8_t aeLock = 1;
4302 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4303 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4304 camera_metadata_t *modified_settings = modified_meta.release();
4305 request->settings = modified_settings;
4306
4307 /* Capture Settling & -2x frame */
4308 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4309 request->frame_number = internalFrameNumber;
4310 processCaptureRequest(request, internallyRequestedStreams);
4311
4312 request->num_output_buffers = originalOutputCount;
4313 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4314 request->frame_number = internalFrameNumber;
4315 processCaptureRequest(request, emptyInternalList);
4316 request->num_output_buffers = 0;
4317
4318 modified_meta = modified_settings;
4319 expCompensation = 0;
4320 aeLock = 1;
4321 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4322 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4323 modified_settings = modified_meta.release();
4324 request->settings = modified_settings;
4325
4326 /* Capture Settling & 0X frame */
4327
4328 itr = internallyRequestedStreams.begin();
4329 if (itr == internallyRequestedStreams.end()) {
4330 LOGE("Error Internally Requested Stream list is empty");
4331 assert(0);
4332 } else {
4333 itr->need_metadata = 0;
4334 itr->meteringOnly = 1;
4335 }
4336
4337 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4338 request->frame_number = internalFrameNumber;
4339 processCaptureRequest(request, internallyRequestedStreams);
4340
4341 itr = internallyRequestedStreams.begin();
4342 if (itr == internallyRequestedStreams.end()) {
4343 ALOGE("Error Internally Requested Stream list is empty");
4344 assert(0);
4345 } else {
4346 itr->need_metadata = 1;
4347 itr->meteringOnly = 0;
4348 }
4349
4350 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4351 request->frame_number = internalFrameNumber;
4352 processCaptureRequest(request, internallyRequestedStreams);
4353
4354 /* Capture 2X frame*/
4355 modified_meta = modified_settings;
4356 expCompensation = GB_HDR_2X_STEP_EV;
4357 aeLock = 1;
4358 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4359 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4360 modified_settings = modified_meta.release();
4361 request->settings = modified_settings;
4362
4363 itr = internallyRequestedStreams.begin();
4364 if (itr == internallyRequestedStreams.end()) {
4365 ALOGE("Error Internally Requested Stream list is empty");
4366 assert(0);
4367 } else {
4368 itr->need_metadata = 0;
4369 itr->meteringOnly = 1;
4370 }
4371 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4372 request->frame_number = internalFrameNumber;
4373 processCaptureRequest(request, internallyRequestedStreams);
4374
4375 itr = internallyRequestedStreams.begin();
4376 if (itr == internallyRequestedStreams.end()) {
4377 ALOGE("Error Internally Requested Stream list is empty");
4378 assert(0);
4379 } else {
4380 itr->need_metadata = 1;
4381 itr->meteringOnly = 0;
4382 }
4383
4384 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4385 request->frame_number = internalFrameNumber;
4386 processCaptureRequest(request, internallyRequestedStreams);
4387
4388
4389 /* Capture 2X on original streaming config*/
4390 internallyRequestedStreams.clear();
4391
4392 /* Restore original settings pointer */
4393 request->settings = original_settings;
4394 } else {
4395 uint32_t internalFrameNumber;
4396 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4397 request->frame_number = internalFrameNumber;
4398 return processCaptureRequest(request, internallyRequestedStreams);
4399 }
4400
4401 return NO_ERROR;
4402}
4403
4404/*===========================================================================
4405 * FUNCTION : orchestrateResult
4406 *
4407 * DESCRIPTION: Orchestrates a capture result to camera service
4408 *
4409 * PARAMETERS :
4410 * @request : request from framework to process
4411 *   @result : capture result to send to camera service
4412 * RETURN :
4413 *
4414 *==========================================================================*/
4415void QCamera3HardwareInterface::orchestrateResult(
4416 camera3_capture_result_t *result)
4417{
4418 uint32_t frameworkFrameNumber;
4419 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4420 frameworkFrameNumber);
4421 if (rc != NO_ERROR) {
4422 LOGE("Cannot find translated frameworkFrameNumber");
4423 assert(0);
4424 } else {
4425 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004426 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004427 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004428 if (result->result != NULL) {
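                // Rewrite ANDROID_SYNC_FRAME_NUMBER (if present) so that it
                // refers to the framework frame number rather than the
                // internal one before the result is returned.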
Binhao Lin299ffc92017-04-27 11:22:47 -07004429 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4430 camera_metadata_entry_t entry;
4431 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4432 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004433 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004434 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4435 if (ret != OK)
4436 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004437 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004438 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004439 result->frame_number = frameworkFrameNumber;
4440 mCallbackOps->process_capture_result(mCallbackOps, result);
4441 }
4442 }
4443}
4444
4445/*===========================================================================
4446 * FUNCTION : orchestrateNotify
4447 *
4448 * DESCRIPTION: Orchestrates a notify to camera service
4449 *
4450 * PARAMETERS :
4451 *   @notify_msg : notify message to send to camera service
4452 *
4453 * RETURN :
4454 *
4455 *==========================================================================*/
4456void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4457{
4458 uint32_t frameworkFrameNumber;
4459 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004460 int32_t rc = NO_ERROR;
4461
4462 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004463 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004464
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004465 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004466 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4467 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4468 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004469 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004470 LOGE("Cannot find translated frameworkFrameNumber");
4471 assert(0);
4472 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004473 }
4474 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004475
4476 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4477 LOGD("Internal Request drop the notifyCb");
4478 } else {
4479 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4480 mCallbackOps->notify(mCallbackOps, notify_msg);
4481 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004482}
4483
4484/*===========================================================================
4485 * FUNCTION : FrameNumberRegistry
4486 *
4487 * DESCRIPTION: Constructor
4488 *
4489 * PARAMETERS :
4490 *
4491 * RETURN :
4492 *
4493 *==========================================================================*/
4494FrameNumberRegistry::FrameNumberRegistry()
4495{
4496 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4497}
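/* Typical flow: orchestrateRequest() maps a framework frame number to a new
 * internal frame number via allocStoreInternalFrameNumber() (or generates a
 * purely internal one via generateStoreInternalFrameNumber()) before calling
 * processCaptureRequest(); orchestrateResult() and orchestrateNotify() later
 * translate back with getFrameworkFrameNumber() before invoking the framework
 * callbacks. */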
4498
4499/*===========================================================================
4500 * FUNCTION : ~FrameNumberRegistry
4501 *
4502 * DESCRIPTION: Destructor
4503 *
4504 * PARAMETERS :
4505 *
4506 * RETURN :
4507 *
4508 *==========================================================================*/
4509FrameNumberRegistry::~FrameNumberRegistry()
4510{
4511}
4512
4513/*===========================================================================
4514 * FUNCTION : PurgeOldEntriesLocked
4515 *
4516 * DESCRIPTION: Maintenance function that triggers the LRU cleanup mechanism
4517 *
4518 * PARAMETERS :
4519 *
4520 * RETURN : NONE
4521 *
4522 *==========================================================================*/
4523void FrameNumberRegistry::purgeOldEntriesLocked()
4524{
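    // Entries are ordered by internal frame number; drop anything older than
    // the last FRAME_REGISTER_LRU_SIZE internal numbers and stop at the first
    // entry that is still recent.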
4525 while (_register.begin() != _register.end()) {
4526 auto itr = _register.begin();
4527 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4528 _register.erase(itr);
4529 } else {
4530 return;
4531 }
4532 }
4533}
4534
4535/*===========================================================================
4536 * FUNCTION : allocStoreInternalFrameNumber
4537 *
4538 * DESCRIPTION: Method to record a framework frame number and associate a newly
4539 * generated internal frame number with it
4540 *
4541 * PARAMETERS :
4542 * @fFrameNumber: Identifier given by framework
4543 * @internalFN : Output parameter which will have the newly generated internal
4544 * entry
4545 *
4546 * RETURN : Error code
4547 *
4548 *==========================================================================*/
4549int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4550 uint32_t &internalFrameNumber)
4551{
4552 Mutex::Autolock lock(mRegistryLock);
4553 internalFrameNumber = _nextFreeInternalNumber++;
4554 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4555 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4556 purgeOldEntriesLocked();
4557 return NO_ERROR;
4558}
4559
4560/*===========================================================================
4561 * FUNCTION : generateStoreInternalFrameNumber
4562 *
4563 * DESCRIPTION: Method to generate a new internal frame number that is not
4564 * associated with any framework request
4565 *
4566 * PARAMETERS :
4567 *   @internalFrameNumber: Output parameter holding the newly generated internal frame number
4568 *
4569 *
4570 * RETURN : Error code
4571 *
4572 *==========================================================================*/
4573int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4574{
4575 Mutex::Autolock lock(mRegistryLock);
4576 internalFrameNumber = _nextFreeInternalNumber++;
4577 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4578 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4579 purgeOldEntriesLocked();
4580 return NO_ERROR;
4581}
4582
4583/*===========================================================================
4584 * FUNCTION : getFrameworkFrameNumber
4585 *
4586 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4587 *
4588 * PARAMETERS :
4589 * @internalFrame#: Internal reference
4590 * @frameworkframenumber: Output parameter holding framework frame entry
4591 *
4592 * RETURN : Error code
4593 *
4594 *==========================================================================*/
4595int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4596 uint32_t &frameworkFrameNumber)
4597{
4598 Mutex::Autolock lock(mRegistryLock);
4599 auto itr = _register.find(internalFrameNumber);
4600 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004601 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004602 return -ENOENT;
4603 }
4604
4605 frameworkFrameNumber = itr->second;
4606 purgeOldEntriesLocked();
4607 return NO_ERROR;
4608}
Thierry Strudel3d639192016-09-09 11:52:26 -07004609
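/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills in an HDR+ (pbcamera) stream configuration based on the
 *              dimensions and plane layout of a QCamera3 stream.
 *
 * PARAMETERS : @config: output stream configuration to fill in
 *              @pbStreamId: pbcamera stream id to assign
 *              @pbStreamFormat: pbcamera stream format
 *              @channel: channel that owns the stream
 *              @streamIndex: index of the stream within the channel
 *
 * RETURN : OK on success
 *          BAD_VALUE if config or channel is null
 *          NAME_NOT_FOUND if the stream or its info cannot be queried
 *
 *==========================================================================*/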
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004610status_t QCamera3HardwareInterface::fillPbStreamConfig(
4611 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4612 QCamera3Channel *channel, uint32_t streamIndex) {
4613 if (config == nullptr) {
4614 LOGE("%s: config is null", __FUNCTION__);
4615 return BAD_VALUE;
4616 }
4617
4618 if (channel == nullptr) {
4619 LOGE("%s: channel is null", __FUNCTION__);
4620 return BAD_VALUE;
4621 }
4622
4623 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4624 if (stream == nullptr) {
4625 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4626 return NAME_NOT_FOUND;
4627 }
4628
4629 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4630 if (streamInfo == nullptr) {
4631 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4632 return NAME_NOT_FOUND;
4633 }
4634
4635 config->id = pbStreamId;
4636 config->image.width = streamInfo->dim.width;
4637 config->image.height = streamInfo->dim.height;
4638 config->image.padding = 0;
4639 config->image.format = pbStreamFormat;
4640
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004641 uint32_t totalPlaneSize = 0;
4642
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004643 // Fill plane information.
4644 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4645 pbcamera::PlaneConfiguration plane;
4646 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4647 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4648 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004649
4650 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004651 }
4652
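    // Padding is whatever remains of the total frame length after accounting
    // for all plane data.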
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004653 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004654 return OK;
4655}
4656
Thierry Strudel3d639192016-09-09 11:52:26 -07004657/*===========================================================================
4658 * FUNCTION : processCaptureRequest
4659 *
4660 * DESCRIPTION: process a capture request from camera service
4661 *
4662 * PARAMETERS :
4663 * @request : request from framework to process
4664 *
4665 * RETURN :
4666 *
4667 *==========================================================================*/
4668int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004669 camera3_capture_request_t *request,
4670 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004671{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004672 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004673 int rc = NO_ERROR;
4674 int32_t request_id;
4675 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004676 bool isVidBufRequested = false;
4677 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004678 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004679
4680 pthread_mutex_lock(&mMutex);
4681
4682 // Validate current state
4683 switch (mState) {
4684 case CONFIGURED:
4685 case STARTED:
4686 /* valid state */
4687 break;
4688
4689 case ERROR:
4690 pthread_mutex_unlock(&mMutex);
4691 handleCameraDeviceError();
4692 return -ENODEV;
4693
4694 default:
4695 LOGE("Invalid state %d", mState);
4696 pthread_mutex_unlock(&mMutex);
4697 return -ENODEV;
4698 }
4699
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004700 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004701 if (rc != NO_ERROR) {
4702 LOGE("incoming request is not valid");
4703 pthread_mutex_unlock(&mMutex);
4704 return rc;
4705 }
4706
4707 meta = request->settings;
4708
4709 // For first capture request, send capture intent, and
4710 // stream on all streams
4711 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004712 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004713 // send an unconfigure to the backend so that the isp
4714 // resources are deallocated
4715 if (!mFirstConfiguration) {
4716 cam_stream_size_info_t stream_config_info;
4717 int32_t hal_version = CAM_HAL_V3;
4718 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4719 stream_config_info.buffer_info.min_buffers =
4720 MIN_INFLIGHT_REQUESTS;
4721 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004722 m_bIs4KVideo ? 0 :
4723 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004724 clear_metadata_buffer(mParameters);
4725 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4726 CAM_INTF_PARM_HAL_VERSION, hal_version);
4727 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4728 CAM_INTF_META_STREAM_INFO, stream_config_info);
4729 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4730 mParameters);
4731 if (rc < 0) {
4732 LOGE("set_parms for unconfigure failed");
4733 pthread_mutex_unlock(&mMutex);
4734 return rc;
4735 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004736
Thierry Strudel3d639192016-09-09 11:52:26 -07004737 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004738 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004739 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004740 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004741 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004742 property_get("persist.camera.is_type", is_type_value, "4");
4743 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4744 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4745 property_get("persist.camera.is_type_preview", is_type_value, "4");
4746 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4747 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004748
4749 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4750 int32_t hal_version = CAM_HAL_V3;
4751 uint8_t captureIntent =
4752 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4753 mCaptureIntent = captureIntent;
4754 clear_metadata_buffer(mParameters);
4755 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4756 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4757 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004758 if (mFirstConfiguration) {
4759 // configure instant AEC
4760 // Instant AEC is a session based parameter and it is needed only
4761 // once per complete session after open camera.
4762 // i.e. This is set only once for the first capture request, after open camera.
4763 setInstantAEC(meta);
4764 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004765 uint8_t fwkVideoStabMode=0;
4766 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4767 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4768 }
4769
Xue Tuecac74e2017-04-17 13:58:15 -07004770 // If EIS setprop is enabled then only turn it on for video/preview
4771 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004772 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 int32_t vsMode;
4774 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4776 rc = BAD_VALUE;
4777 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004778 LOGD("setEis %d", setEis);
4779 bool eis3Supported = false;
4780 size_t count = IS_TYPE_MAX;
4781 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4782 for (size_t i = 0; i < count; i++) {
4783 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4784 eis3Supported = true;
4785 break;
4786 }
4787 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004788
4789        //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004790        //it could be either IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4792 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004793 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4794 is_type = isTypePreview;
4795 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4796 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4797 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004799 } else {
4800 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004801 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004802 } else {
4803 is_type = IS_TYPE_NONE;
4804 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004806 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004807 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4808 }
4809 }
4810
4811 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4812 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4813
Thierry Strudel54dc9782017-02-15 12:12:10 -08004814 //Disable tintless only if the property is set to 0
4815 memset(prop, 0, sizeof(prop));
4816 property_get("persist.camera.tintless.enable", prop, "1");
4817 int32_t tintless_value = atoi(prop);
4818
Thierry Strudel3d639192016-09-09 11:52:26 -07004819 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4820 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004821
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 //Disable CDS for HFR mode or if DIS/EIS is on.
4823 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4824 //after every configure_stream
4825 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4826 (m_bIsVideo)) {
4827 int32_t cds = CAM_CDS_MODE_OFF;
4828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4829 CAM_INTF_PARM_CDS_MODE, cds))
4830 LOGE("Failed to disable CDS for HFR mode");
4831
4832 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004833
4834 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4835 uint8_t* use_av_timer = NULL;
4836
4837 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004838 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004839 use_av_timer = &m_debug_avtimer;
4840 }
4841 else{
4842 use_av_timer =
4843 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004844 if (use_av_timer) {
4845 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4846 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004847 }
4848
4849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4850 rc = BAD_VALUE;
4851 }
4852 }
4853
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 setMobicat();
4855
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004856 uint8_t nrMode = 0;
4857 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4858 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4859 }
4860
Thierry Strudel3d639192016-09-09 11:52:26 -07004861 /* Set fps and hfr mode while sending meta stream info so that sensor
4862 * can configure appropriate streaming mode */
4863 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4865 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4867 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004868 if (rc == NO_ERROR) {
4869 int32_t max_fps =
4870 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004871 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004872 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4873 }
4874 /* For HFR, more buffers are dequeued upfront to improve the performance */
4875 if (mBatchSize) {
4876 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4877 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4878 }
4879 }
4880 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 LOGE("setHalFpsRange failed");
4882 }
4883 }
4884 if (meta.exists(ANDROID_CONTROL_MODE)) {
4885 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4886 rc = extractSceneMode(meta, metaMode, mParameters);
4887 if (rc != NO_ERROR) {
4888 LOGE("extractSceneMode failed");
4889 }
4890 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004891 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004892
Thierry Strudel04e026f2016-10-10 11:27:36 -07004893 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4894 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4895 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4896 rc = setVideoHdrMode(mParameters, vhdr);
4897 if (rc != NO_ERROR) {
4898 LOGE("setVideoHDR is failed");
4899 }
4900 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004901
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004902 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004903 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004904 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004905 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4907 sensorModeFullFov)) {
4908 rc = BAD_VALUE;
4909 }
4910 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 //TODO: validate the arguments, HSV scenemode should have only the
4912 //advertised fps ranges
4913
4914 /*set the capture intent, hal version, tintless, stream info,
4915     *and DIS enable parameters to the backend*/
4916 LOGD("set_parms META_STREAM_INFO " );
4917 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004918 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4919 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 mStreamConfigInfo.type[i],
4921 mStreamConfigInfo.stream_sizes[i].width,
4922 mStreamConfigInfo.stream_sizes[i].height,
4923 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004924 mStreamConfigInfo.format[i],
4925 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004926 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004927
Thierry Strudel3d639192016-09-09 11:52:26 -07004928 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4929 mParameters);
4930 if (rc < 0) {
4931 LOGE("set_parms failed for hal version, stream info");
4932 }
4933
Chien-Yu Chenee335912017-02-09 17:53:20 -08004934 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4935 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004936 if (rc != NO_ERROR) {
4937 LOGE("Failed to get sensor output size");
4938 pthread_mutex_unlock(&mMutex);
4939 goto error_exit;
4940 }
4941
4942 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4943 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004944 mSensorModeInfo.active_array_size.width,
4945 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004946
4947 /* Set batchmode before initializing channel. Since registerBuffer
4948 * internally initializes some of the channels, better set batchmode
4949 * even before first register buffer */
4950 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4951 it != mStreamInfo.end(); it++) {
4952 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4953 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4954 && mBatchSize) {
4955 rc = channel->setBatchSize(mBatchSize);
4956 //Disable per frame map unmap for HFR/batchmode case
4957 rc |= channel->setPerFrameMapUnmap(false);
4958 if (NO_ERROR != rc) {
4959 LOGE("Channel init failed %d", rc);
4960 pthread_mutex_unlock(&mMutex);
4961 goto error_exit;
4962 }
4963 }
4964 }
4965
4966 //First initialize all streams
4967 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4968 it != mStreamInfo.end(); it++) {
4969 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004970
4971 /* Initial value of NR mode is needed before stream on */
4972 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07004973 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4974 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004975 setEis) {
4976 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4977 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4978 is_type = mStreamConfigInfo.is_type[i];
4979 break;
4980 }
4981 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004982 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004984 rc = channel->initialize(IS_TYPE_NONE);
4985 }
4986 if (NO_ERROR != rc) {
4987 LOGE("Channel initialization failed %d", rc);
4988 pthread_mutex_unlock(&mMutex);
4989 goto error_exit;
4990 }
4991 }
4992
4993 if (mRawDumpChannel) {
4994 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4995 if (rc != NO_ERROR) {
4996 LOGE("Error: Raw Dump Channel init failed");
4997 pthread_mutex_unlock(&mMutex);
4998 goto error_exit;
4999 }
5000 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005001 if (mHdrPlusRawSrcChannel) {
5002 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5003 if (rc != NO_ERROR) {
5004 LOGE("Error: HDR+ RAW Source Channel init failed");
5005 pthread_mutex_unlock(&mMutex);
5006 goto error_exit;
5007 }
5008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 if (mSupportChannel) {
5010 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5011 if (rc < 0) {
5012 LOGE("Support channel initialization failed");
5013 pthread_mutex_unlock(&mMutex);
5014 goto error_exit;
5015 }
5016 }
5017 if (mAnalysisChannel) {
5018 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5019 if (rc < 0) {
5020 LOGE("Analysis channel initialization failed");
5021 pthread_mutex_unlock(&mMutex);
5022 goto error_exit;
5023 }
5024 }
5025 if (mDummyBatchChannel) {
5026 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5027 if (rc < 0) {
5028 LOGE("mDummyBatchChannel setBatchSize failed");
5029 pthread_mutex_unlock(&mMutex);
5030 goto error_exit;
5031 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 if (rc < 0) {
5034 LOGE("mDummyBatchChannel initialization failed");
5035 pthread_mutex_unlock(&mMutex);
5036 goto error_exit;
5037 }
5038 }
5039
5040 // Set bundle info
5041 rc = setBundleInfo();
5042 if (rc < 0) {
5043 LOGE("setBundleInfo failed %d", rc);
5044 pthread_mutex_unlock(&mMutex);
5045 goto error_exit;
5046 }
5047
5048 //update settings from app here
5049 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5050 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5051 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5052 }
5053 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5054 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5055 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5056 }
5057 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5058 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5059 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5060
5061 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5062 (mLinkedCameraId != mCameraId) ) {
5063 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5064 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 goto error_exit;
5067 }
5068 }
5069
5070 // add bundle related cameras
5071 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5072 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005073 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5074 &m_pDualCamCmdPtr->bundle_info;
5075 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 if (mIsDeviceLinked)
5077 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5078 else
5079 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5080
5081 pthread_mutex_lock(&gCamLock);
5082
5083 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5084 LOGE("Dualcam: Invalid Session Id ");
5085 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005086 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005087 goto error_exit;
5088 }
5089
5090 if (mIsMainCamera == 1) {
5091 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5092 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005093 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005094 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 // related session id should be session id of linked session
5096 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5097 } else {
5098 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5099 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005100 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005101 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005102 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5103 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005104 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005105 pthread_mutex_unlock(&gCamLock);
5106
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005107 rc = mCameraHandle->ops->set_dual_cam_cmd(
5108 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 if (rc < 0) {
5110 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005111 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 goto error_exit;
5113 }
5114 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005115 goto no_error;
5116error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005117 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 return rc;
5119no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 mWokenUpByDaemon = false;
5121 mPendingLiveRequest = 0;
5122 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 }
5124
5125 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005126 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005127
5128 if (mFlushPerf) {
5129 //we cannot accept any requests during flush
5130 LOGE("process_capture_request cannot proceed during flush");
5131 pthread_mutex_unlock(&mMutex);
5132 return NO_ERROR; //should return an error
5133 }
5134
5135 if (meta.exists(ANDROID_REQUEST_ID)) {
5136 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5137 mCurrentRequestId = request_id;
5138 LOGD("Received request with id: %d", request_id);
5139 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5140        LOGE("Unable to find request id field, "
5141                "& no previous id available");
5142 pthread_mutex_unlock(&mMutex);
5143 return NAME_NOT_FOUND;
5144 } else {
5145 LOGD("Re-using old request id");
5146 request_id = mCurrentRequestId;
5147 }
5148
5149 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5150 request->num_output_buffers,
5151 request->input_buffer,
5152 frameNumber);
5153 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005154 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005155 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005156 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005157 uint32_t snapshotStreamId = 0;
5158 for (size_t i = 0; i < request->num_output_buffers; i++) {
5159 const camera3_stream_buffer_t& output = request->output_buffers[i];
5160 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5161
Emilian Peev7650c122017-01-19 08:24:33 -08005162 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5163 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005164 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 blob_request = 1;
5166 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5167 }
5168
5169 if (output.acquire_fence != -1) {
5170 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5171 close(output.acquire_fence);
5172 if (rc != OK) {
5173 LOGE("sync wait failed %d", rc);
5174 pthread_mutex_unlock(&mMutex);
5175 return rc;
5176 }
5177 }
5178
Emilian Peev0f3c3162017-03-15 12:57:46 +00005179 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5180 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005181 depthRequestPresent = true;
5182 continue;
5183 }
5184
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005185 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005186 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005187
5188 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5189 isVidBufRequested = true;
5190 }
5191 }
5192
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005193    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5194 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5195 itr++) {
5196 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5197 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5198 channel->getStreamID(channel->getStreamTypeMask());
5199
5200 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5201 isVidBufRequested = true;
5202 }
5203 }
5204
Thierry Strudel3d639192016-09-09 11:52:26 -07005205 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005206 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005207 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 }
5209 if (blob_request && mRawDumpChannel) {
5210 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005211 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005212 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005213 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 }
5215
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005216 {
5217 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5218 // Request a RAW buffer if
5219 // 1. mHdrPlusRawSrcChannel is valid.
5220 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5221 // 3. There is no pending HDR+ request.
5222 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5223 mHdrPlusPendingRequests.size() == 0) {
5224 streamsArray.stream_request[streamsArray.num_streams].streamID =
5225 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5226 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5227 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005228 }
5229
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005230 //extract capture intent
5231 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5232 mCaptureIntent =
5233 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5234 }
5235
5236 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5237 mCacMode =
5238 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5239 }
5240
5241 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005242 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005243
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005244 {
5245 Mutex::Autolock l(gHdrPlusClientLock);
5246 // If this request has a still capture intent, try to submit an HDR+ request.
5247 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5248 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5249 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5250 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005251 }
5252
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005253 if (hdrPlusRequest) {
5254 // For a HDR+ request, just set the frame parameters.
5255 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5256 if (rc < 0) {
5257 LOGE("fail to set frame parameters");
5258 pthread_mutex_unlock(&mMutex);
5259 return rc;
5260 }
5261 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 /* Parse the settings:
5263 * - For every request in NORMAL MODE
5264 * - For every request in HFR mode during preview only case
5265 * - For first request of every batch in HFR mode during video
5266 * recording. In batchmode the same settings except frame number is
5267 * repeated in each request of the batch.
5268 */
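        // Illustration (numbers hypothetical): with mBatchSize == 4 during HFR video
        // recording, setFrameParameters() runs only for the first request of each batch
        // of 4; the remaining three requests reuse those settings and differ only in
        // frame number.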
5269 if (!mBatchSize ||
5270 (mBatchSize && !isVidBufRequested) ||
5271 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005272 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005273 if (rc < 0) {
5274 LOGE("fail to set frame parameters");
5275 pthread_mutex_unlock(&mMutex);
5276 return rc;
5277 }
5278 }
5279 /* For batchMode HFR, setFrameParameters is not called for every
5280 * request; only the frame number of the latest request is parsed.
5281 * Keep track of the first and last frame numbers in a batch so that
5282 * metadata for all frame numbers of the batch can be duplicated in
5283 * handleBatchMetadata */
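        // Illustration (numbers hypothetical): if mFirstFrameNumberInBatch == 100 and
        // mBatchSize == 4, handleBatchMetadata() replays the batch metadata for frames
        // 100..103.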
5284 if (mBatchSize) {
5285 if (!mToBeQueuedVidBufs) {
5286 //start of the batch
5287 mFirstFrameNumberInBatch = request->frame_number;
5288 }
5289 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5290 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5291 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005292 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 return BAD_VALUE;
5294 }
5295 }
5296 if (mNeedSensorRestart) {
5297 /* Unlock the mutex as restartSensor waits on the channels to be
5298 * stopped, which in turn calls stream callback functions -
5299 * handleBufferWithLock and handleMetadataWithLock */
5300 pthread_mutex_unlock(&mMutex);
5301 rc = dynamicUpdateMetaStreamInfo();
5302 if (rc != NO_ERROR) {
5303 LOGE("Restarting the sensor failed");
5304 return BAD_VALUE;
5305 }
5306 mNeedSensorRestart = false;
5307 pthread_mutex_lock(&mMutex);
5308 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005309 if(mResetInstantAEC) {
5310 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5311 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5312 mResetInstantAEC = false;
5313 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005314 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 if (request->input_buffer->acquire_fence != -1) {
5316 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5317 close(request->input_buffer->acquire_fence);
5318 if (rc != OK) {
5319 LOGE("input buffer sync wait failed %d", rc);
5320 pthread_mutex_unlock(&mMutex);
5321 return rc;
5322 }
5323 }
5324 }
5325
5326 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5327 mLastCustIntentFrmNum = frameNumber;
5328 }
5329 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005331 pendingRequestIterator latestRequest;
5332 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005333 pendingRequest.num_buffers = depthRequestPresent ?
5334 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005335 pendingRequest.request_id = request_id;
5336 pendingRequest.blob_request = blob_request;
5337 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005338 if (request->input_buffer) {
5339 pendingRequest.input_buffer =
5340 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5341 *(pendingRequest.input_buffer) = *(request->input_buffer);
5342 pInputBuffer = pendingRequest.input_buffer;
5343 } else {
5344 pendingRequest.input_buffer = NULL;
5345 pInputBuffer = NULL;
5346 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005347 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005348
5349 pendingRequest.pipeline_depth = 0;
5350 pendingRequest.partial_result_cnt = 0;
5351 extractJpegMetadata(mCurJpegMeta, request);
5352 pendingRequest.jpegMetadata = mCurJpegMeta;
5353 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005354 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005355 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5356 mHybridAeEnable =
5357 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5358 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005359
5360 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5361 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005362 /* DevCamDebug metadata processCaptureRequest */
5363 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5364 mDevCamDebugMetaEnable =
5365 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5366 }
5367 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5368 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005369
5370 //extract CAC info
5371 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5372 mCacMode =
5373 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5374 }
5375 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005376 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005377
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005378 // extract enableZsl info
5379 if (gExposeEnableZslKey) {
5380 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5381 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5382 mZslEnabled = pendingRequest.enableZsl;
5383 } else {
5384 pendingRequest.enableZsl = mZslEnabled;
5385 }
5386 }
5387
Thierry Strudel3d639192016-09-09 11:52:26 -07005388 PendingBuffersInRequest bufsForCurRequest;
5389 bufsForCurRequest.frame_number = frameNumber;
5390 // Mark current timestamp for the new request
5391 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005392 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005393
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005394 if (hdrPlusRequest) {
5395 // Save settings for this request.
5396 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5397 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5398
5399 // Add to pending HDR+ request queue.
5400 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5401 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5402
5403 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5404 }
5405
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005407 if ((request->output_buffers[i].stream->data_space ==
5408 HAL_DATASPACE_DEPTH) &&
5409 (HAL_PIXEL_FORMAT_BLOB ==
5410 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005411 continue;
5412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005413 RequestedBufferInfo requestedBuf;
5414 memset(&requestedBuf, 0, sizeof(requestedBuf));
5415 requestedBuf.stream = request->output_buffers[i].stream;
5416 requestedBuf.buffer = NULL;
5417 pendingRequest.buffers.push_back(requestedBuf);
5418
5419 // Add the buffer handle to the pending buffers list
5420 PendingBufferInfo bufferInfo;
5421 bufferInfo.buffer = request->output_buffers[i].buffer;
5422 bufferInfo.stream = request->output_buffers[i].stream;
5423 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5424 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5425 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5426 frameNumber, bufferInfo.buffer,
5427 channel->getStreamTypeMask(), bufferInfo.stream->format);
5428 }
5429 // Add this request packet into mPendingBuffersMap
5430 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5431 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5432 mPendingBuffersMap.get_num_overall_buffers());
5433
5434 latestRequest = mPendingRequestsList.insert(
5435 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005436
5437 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5438 // for the frame number.
5439 mShutterDispatcher.expectShutter(frameNumber);
5440 for (size_t i = 0; i < request->num_output_buffers; i++) {
5441 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5442 }
5443
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 if(mFlush) {
5445 LOGI("mFlush is true");
5446 pthread_mutex_unlock(&mMutex);
5447 return NO_ERROR;
5448 }
5449
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005450 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5451 // channel.
5452 if (!hdrPlusRequest) {
5453 int indexUsed;
5454 // Notify metadata channel we receive a request
5455 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005456
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005457 if(request->input_buffer != NULL){
5458 LOGD("Input request, frame_number %d", frameNumber);
5459 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5460 if (NO_ERROR != rc) {
5461 LOGE("fail to set reproc parameters");
5462 pthread_mutex_unlock(&mMutex);
5463 return rc;
5464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005465 }
5466
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005467 // Call request on other streams
5468 uint32_t streams_need_metadata = 0;
5469 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5470 for (size_t i = 0; i < request->num_output_buffers; i++) {
5471 const camera3_stream_buffer_t& output = request->output_buffers[i];
5472 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5473
5474 if (channel == NULL) {
5475 LOGW("invalid channel pointer for stream");
5476 continue;
5477 }
5478
5479 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5480 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5481 output.buffer, request->input_buffer, frameNumber);
5482 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005483 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005484 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5485 if (rc < 0) {
5486 LOGE("Fail to request on picture channel");
5487 pthread_mutex_unlock(&mMutex);
5488 return rc;
5489 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005490 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005491 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5492 assert(NULL != mDepthChannel);
5493 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005494
Emilian Peev7650c122017-01-19 08:24:33 -08005495 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5496 if (rc < 0) {
5497 LOGE("Fail to map on depth buffer");
5498 pthread_mutex_unlock(&mMutex);
5499 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005500 }
Emilian Peev7650c122017-01-19 08:24:33 -08005501 } else {
5502 LOGD("snapshot request with buffer %p, frame_number %d",
5503 output.buffer, frameNumber);
5504 if (!request->settings) {
5505 rc = channel->request(output.buffer, frameNumber,
5506 NULL, mPrevParameters, indexUsed);
5507 } else {
5508 rc = channel->request(output.buffer, frameNumber,
5509 NULL, mParameters, indexUsed);
5510 }
5511 if (rc < 0) {
5512 LOGE("Fail to request on picture channel");
5513 pthread_mutex_unlock(&mMutex);
5514 return rc;
5515 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005516
Emilian Peev7650c122017-01-19 08:24:33 -08005517 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5518 uint32_t j = 0;
5519 for (j = 0; j < streamsArray.num_streams; j++) {
5520 if (streamsArray.stream_request[j].streamID == streamId) {
5521 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5522 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5523 else
5524 streamsArray.stream_request[j].buf_index = indexUsed;
5525 break;
5526 }
5527 }
5528 if (j == streamsArray.num_streams) {
5529 LOGE("Did not find matching stream to update index");
5530 assert(0);
5531 }
5532
5533 pendingBufferIter->need_metadata = true;
5534 streams_need_metadata++;
5535 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005536 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005537 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5538 bool needMetadata = false;
5539 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5540 rc = yuvChannel->request(output.buffer, frameNumber,
5541 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5542 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005543 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005544 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005545 pthread_mutex_unlock(&mMutex);
5546 return rc;
5547 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005548
5549 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5550 uint32_t j = 0;
5551 for (j = 0; j < streamsArray.num_streams; j++) {
5552 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005553 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5554 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5555 else
5556 streamsArray.stream_request[j].buf_index = indexUsed;
5557 break;
5558 }
5559 }
5560 if (j == streamsArray.num_streams) {
5561 LOGE("Did not find matching stream to update index");
5562 assert(0);
5563 }
5564
5565 pendingBufferIter->need_metadata = needMetadata;
5566 if (needMetadata)
5567 streams_need_metadata += 1;
5568 LOGD("calling YUV channel request, need_metadata is %d",
5569 needMetadata);
5570 } else {
5571 LOGD("request with buffer %p, frame_number %d",
5572 output.buffer, frameNumber);
5573
5574 rc = channel->request(output.buffer, frameNumber, indexUsed);
5575
5576 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5577 uint32_t j = 0;
5578 for (j = 0; j < streamsArray.num_streams; j++) {
5579 if (streamsArray.stream_request[j].streamID == streamId) {
5580 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5581 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5582 else
5583 streamsArray.stream_request[j].buf_index = indexUsed;
5584 break;
5585 }
5586 }
5587 if (j == streamsArray.num_streams) {
5588 LOGE("Did not find matching stream to update index");
5589 assert(0);
5590 }
5591
5592 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5593 && mBatchSize) {
5594 mToBeQueuedVidBufs++;
5595 if (mToBeQueuedVidBufs == mBatchSize) {
5596 channel->queueBatchBuf();
5597 }
5598 }
5599 if (rc < 0) {
5600 LOGE("request failed");
5601 pthread_mutex_unlock(&mMutex);
5602 return rc;
5603 }
5604 }
5605 pendingBufferIter++;
5606 }
5607
5608 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5609 itr++) {
5610 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5611
5612 if (channel == NULL) {
5613 LOGE("invalid channel pointer for stream");
5614 assert(0);
5615 return BAD_VALUE;
5616 }
5617
5618 InternalRequest requestedStream;
5619 requestedStream = (*itr);
5620
5621
5622 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5623 LOGD("snapshot request internally input buffer %p, frame_number %d",
5624 request->input_buffer, frameNumber);
5625 if(request->input_buffer != NULL){
5626 rc = channel->request(NULL, frameNumber,
5627 pInputBuffer, &mReprocMeta, indexUsed, true,
5628 requestedStream.meteringOnly);
5629 if (rc < 0) {
5630 LOGE("Fail to request on picture channel");
5631 pthread_mutex_unlock(&mMutex);
5632 return rc;
5633 }
5634 } else {
5635 LOGD("snapshot request with frame_number %d", frameNumber);
5636 if (!request->settings) {
5637 rc = channel->request(NULL, frameNumber,
5638 NULL, mPrevParameters, indexUsed, true,
5639 requestedStream.meteringOnly);
5640 } else {
5641 rc = channel->request(NULL, frameNumber,
5642 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5643 }
5644 if (rc < 0) {
5645 LOGE("Fail to request on picture channel");
5646 pthread_mutex_unlock(&mMutex);
5647 return rc;
5648 }
5649
5650 if ((*itr).meteringOnly != 1) {
5651 requestedStream.need_metadata = 1;
5652 streams_need_metadata++;
5653 }
5654 }
5655
5656 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5657 uint32_t j = 0;
5658 for (j = 0; j < streamsArray.num_streams; j++) {
5659 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005660 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5661 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5662 else
5663 streamsArray.stream_request[j].buf_index = indexUsed;
5664 break;
5665 }
5666 }
5667 if (j == streamsArray.num_streams) {
5668 LOGE("Did not find matching stream to update index");
5669 assert(0);
5670 }
5671
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005672 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005673 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005674 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005675 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005676 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005677 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005678 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005679
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 //If 2 streams have need_metadata set to true, fail the request, unless
5681 //we copy/reference count the metadata buffer
5682 if (streams_need_metadata > 1) {
5683 LOGE("not supporting request in which two streams require"
5684 " HAL metadata for reprocessing");
5685 pthread_mutex_unlock(&mMutex);
5686 return -EINVAL;
5687 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005688
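    // Enable PDAF raw data in the backend only when this request includes a depth
    // (HAL_DATASPACE_DEPTH) blob output.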
Emilian Peev7650c122017-01-19 08:24:33 -08005689 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5690 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5691 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5692 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5693 pthread_mutex_unlock(&mMutex);
5694 return BAD_VALUE;
5695 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005696 if (request->input_buffer == NULL) {
5697 /* Set the parameters to backend:
5698 * - For every request in NORMAL MODE
5699 * - For every request in HFR mode during preview only case
5700 * - Once every batch in HFR mode during video recording
5701 */
5702 if (!mBatchSize ||
5703 (mBatchSize && !isVidBufRequested) ||
5704 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5705 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5706 mBatchSize, isVidBufRequested,
5707 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005708
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005709 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5710 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5711 uint32_t m = 0;
5712 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5713 if (streamsArray.stream_request[k].streamID ==
5714 mBatchedStreamsArray.stream_request[m].streamID)
5715 break;
5716 }
5717 if (m == mBatchedStreamsArray.num_streams) {
5718 mBatchedStreamsArray.stream_request\
5719 [mBatchedStreamsArray.num_streams].streamID =
5720 streamsArray.stream_request[k].streamID;
5721 mBatchedStreamsArray.stream_request\
5722 [mBatchedStreamsArray.num_streams].buf_index =
5723 streamsArray.stream_request[k].buf_index;
5724 mBatchedStreamsArray.num_streams =
5725 mBatchedStreamsArray.num_streams + 1;
5726 }
5727 }
5728 streamsArray = mBatchedStreamsArray;
5729 }
5730 /* Update stream id of all the requested buffers */
5731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5732 streamsArray)) {
5733 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005734 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005735 return BAD_VALUE;
5736 }
5737
5738 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5739 mParameters);
5740 if (rc < 0) {
5741 LOGE("set_parms failed");
5742 }
5743 /* reset to zero because the batch is queued */
5744 mToBeQueuedVidBufs = 0;
5745 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5746 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5747 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
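            // Batch is not yet full: accumulate the unique stream IDs requested so far so
            // that a single set_parms call can cover the whole batch once it completes.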
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005748 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5749 uint32_t m = 0;
5750 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5751 if (streamsArray.stream_request[k].streamID ==
5752 mBatchedStreamsArray.stream_request[m].streamID)
5753 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005754 }
5755 if (m == mBatchedStreamsArray.num_streams) {
5756 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5757 streamID = streamsArray.stream_request[k].streamID;
5758 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5759 buf_index = streamsArray.stream_request[k].buf_index;
5760 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5761 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762 }
5763 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005764 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005765
5766 // Start all streams after the first setting is sent, so that the
5767 // setting can be applied sooner: (0 + apply_delay)th frame.
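        // Start order below: metadata channel, analysis/support channels, all processing
        // channels, the optional raw dump channel, then start_channel() without sensor
        // streaming, Easel MIPI (if present), and finally sensor streaming.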
5768 if (mState == CONFIGURED && mChannelHandle) {
5769 //Then start them.
5770 LOGH("Start META Channel");
5771 rc = mMetadataChannel->start();
5772 if (rc < 0) {
5773 LOGE("META channel start failed");
5774 pthread_mutex_unlock(&mMutex);
5775 return rc;
5776 }
5777
5778 if (mAnalysisChannel) {
5779 rc = mAnalysisChannel->start();
5780 if (rc < 0) {
5781 LOGE("Analysis channel start failed");
5782 mMetadataChannel->stop();
5783 pthread_mutex_unlock(&mMutex);
5784 return rc;
5785 }
5786 }
5787
5788 if (mSupportChannel) {
5789 rc = mSupportChannel->start();
5790 if (rc < 0) {
5791 LOGE("Support channel start failed");
5792 mMetadataChannel->stop();
5793 /* Although support and analysis are mutually exclusive today,
5794 stop the analysis channel here as well for future proofing */
5795 if (mAnalysisChannel) {
5796 mAnalysisChannel->stop();
5797 }
5798 pthread_mutex_unlock(&mMutex);
5799 return rc;
5800 }
5801 }
5802 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5803 it != mStreamInfo.end(); it++) {
5804 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5805 LOGH("Start Processing Channel mask=%d",
5806 channel->getStreamTypeMask());
5807 rc = channel->start();
5808 if (rc < 0) {
5809 LOGE("channel start failed");
5810 pthread_mutex_unlock(&mMutex);
5811 return rc;
5812 }
5813 }
5814
5815 if (mRawDumpChannel) {
5816 LOGD("Starting raw dump stream");
5817 rc = mRawDumpChannel->start();
5818 if (rc != NO_ERROR) {
5819 LOGE("Error Starting Raw Dump Channel");
5820 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5821 it != mStreamInfo.end(); it++) {
5822 QCamera3Channel *channel =
5823 (QCamera3Channel *)(*it)->stream->priv;
5824 LOGH("Stopping Processing Channel mask=%d",
5825 channel->getStreamTypeMask());
5826 channel->stop();
5827 }
5828 if (mSupportChannel)
5829 mSupportChannel->stop();
5830 if (mAnalysisChannel) {
5831 mAnalysisChannel->stop();
5832 }
5833 mMetadataChannel->stop();
5834 pthread_mutex_unlock(&mMutex);
5835 return rc;
5836 }
5837 }
5838
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005839 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005840 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005841 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005842 if (rc != NO_ERROR) {
5843 LOGE("start_channel failed %d", rc);
5844 pthread_mutex_unlock(&mMutex);
5845 return rc;
5846 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005847
5848 {
5849 // Configure Easel for stream on.
5850 Mutex::Autolock l(gHdrPlusClientLock);
5851 if (EaselManagerClientOpened) {
5852 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005853 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5854 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005855 if (rc != OK) {
5856 ALOGE("%s: Failed to start MIPI for camera %u at rate %u", __FUNCTION__,
5857 mCameraId, mSensorModeInfo.op_pixel_clk);
5858 pthread_mutex_unlock(&mMutex);
5859 return rc;
5860 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005861 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005862 }
5863 }
5864
5865 // Start sensor streaming.
5866 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5867 mChannelHandle);
5868 if (rc != NO_ERROR) {
5869 LOGE("start_sensor_streaming failed %d", rc);
5870 pthread_mutex_unlock(&mMutex);
5871 return rc;
5872 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005873 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005874 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005875 }
5876
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005877 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5878 {
5879 Mutex::Autolock l(gHdrPlusClientLock);
5880 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5881 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5882 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5883 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5884 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5885 rc = enableHdrPlusModeLocked();
5886 if (rc != OK) {
5887 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5888 pthread_mutex_unlock(&mMutex);
5889 return rc;
5890 }
5891
5892 mFirstPreviewIntentSeen = true;
5893 }
5894 }
5895
Thierry Strudel3d639192016-09-09 11:52:26 -07005896 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5897
5898 mState = STARTED;
5899 // Added a timed condition wait
5900 struct timespec ts;
5901 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005902 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005903 if (rc < 0) {
5904 isValidTimeout = 0;
5905 LOGE("Error reading the monotonic clock!!");
5906 }
5907 else {
5908 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005909 int64_t timeout = 5;
5910 {
5911 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5912 // If there is a pending HDR+ request, the following requests may be blocked until the
5913 // HDR+ request is done. So allow a longer timeout.
5914 if (mHdrPlusPendingRequests.size() > 0) {
5915 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5916 }
5917 }
5918 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005919 }
5920 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005921 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005922 (mState != ERROR) && (mState != DEINIT)) {
5923 if (!isValidTimeout) {
5924 LOGD("Blocking on conditional wait");
5925 pthread_cond_wait(&mRequestCond, &mMutex);
5926 }
5927 else {
5928 LOGD("Blocking on timed conditional wait");
5929 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5930 if (rc == ETIMEDOUT) {
5931 rc = -ENODEV;
5932 LOGE("Unblocked on timeout!!!!");
5933 break;
5934 }
5935 }
5936 LOGD("Unblocked");
5937 if (mWokenUpByDaemon) {
5938 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005939 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005940 break;
5941 }
5942 }
5943 pthread_mutex_unlock(&mMutex);
5944
5945 return rc;
5946}
5947
5948/*===========================================================================
5949 * FUNCTION : dump
5950 *
5951 * DESCRIPTION: Dumps current HAL3 state (pending requests, pending buffers and
5952 *              the pending frame drop list) to the given file descriptor
5953 * PARAMETERS :
5954 *   @fd : file descriptor to write the dump output to
5955 *
5956 * RETURN     : None
5957 *==========================================================================*/
5958void QCamera3HardwareInterface::dump(int fd)
5959{
5960 pthread_mutex_lock(&mMutex);
5961 dprintf(fd, "\n Camera HAL3 information Begin \n");
5962
5963 dprintf(fd, "\nNumber of pending requests: %zu \n",
5964 mPendingRequestsList.size());
5965 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5966 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5967 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5968 for(pendingRequestIterator i = mPendingRequestsList.begin();
5969 i != mPendingRequestsList.end(); i++) {
5970 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5971 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5972 i->input_buffer);
5973 }
5974 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5975 mPendingBuffersMap.get_num_overall_buffers());
5976 dprintf(fd, "-------+------------------\n");
5977 dprintf(fd, " Frame | Stream type mask \n");
5978 dprintf(fd, "-------+------------------\n");
5979 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5980 for(auto &j : req.mPendingBufferList) {
5981 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5982 dprintf(fd, " %5d | %11d \n",
5983 req.frame_number, channel->getStreamTypeMask());
5984 }
5985 }
5986 dprintf(fd, "-------+------------------\n");
5987
5988 dprintf(fd, "\nPending frame drop list: %zu\n",
5989 mPendingFrameDropList.size());
5990 dprintf(fd, "-------+-----------\n");
5991 dprintf(fd, " Frame | Stream ID \n");
5992 dprintf(fd, "-------+-----------\n");
5993 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5994 i != mPendingFrameDropList.end(); i++) {
5995 dprintf(fd, " %5d | %9d \n",
5996 i->frame_number, i->stream_ID);
5997 }
5998 dprintf(fd, "-------+-----------\n");
5999
6000 dprintf(fd, "\n Camera HAL3 information End \n");
6001
6002 /* use dumpsys media.camera as trigger to send update debug level event */
6003 mUpdateDebugLevel = true;
6004 pthread_mutex_unlock(&mMutex);
6005 return;
6006}
6007
6008/*===========================================================================
6009 * FUNCTION : flush
6010 *
6011 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6012 * conditionally restarts channels
6013 *
6014 * PARAMETERS :
6015 * @ restartChannels: re-start all channels
6016 *
6017 *
6018 * RETURN :
6019 * 0 on success
6020 * Error code on failure
6021 *==========================================================================*/
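// Flow sketch: stop all channels (unlinking the dual-camera bundle if linked), reset
// bundle info, report errored results for all pending requests, and optionally restart
// the channels.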
6022int QCamera3HardwareInterface::flush(bool restartChannels)
6023{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006024 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006025 int32_t rc = NO_ERROR;
6026
6027 LOGD("Unblocking Process Capture Request");
6028 pthread_mutex_lock(&mMutex);
6029 mFlush = true;
6030 pthread_mutex_unlock(&mMutex);
6031
6032 rc = stopAllChannels();
6033 // unlink of dualcam
6034 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006035 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6036 &m_pDualCamCmdPtr->bundle_info;
6037 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006038 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6039 pthread_mutex_lock(&gCamLock);
6040
6041 if (mIsMainCamera == 1) {
6042 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6043 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006044 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006045 // related session id should be session id of linked session
6046 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6047 } else {
6048 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6049 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006050 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006051 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6052 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006053 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006054 pthread_mutex_unlock(&gCamLock);
6055
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006056 rc = mCameraHandle->ops->set_dual_cam_cmd(
6057 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006058 if (rc < 0) {
6059 LOGE("Dualcam: Unlink failed, but still proceed to close");
6060 }
6061 }
6062
6063 if (rc < 0) {
6064 LOGE("stopAllChannels failed");
6065 return rc;
6066 }
6067 if (mChannelHandle) {
6068 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6069 mChannelHandle);
6070 }
6071
6072 // Reset bundle info
6073 rc = setBundleInfo();
6074 if (rc < 0) {
6075 LOGE("setBundleInfo failed %d", rc);
6076 return rc;
6077 }
6078
6079 // Mutex Lock
6080 pthread_mutex_lock(&mMutex);
6081
6082 // Unblock process_capture_request
6083 mPendingLiveRequest = 0;
6084 pthread_cond_signal(&mRequestCond);
6085
6086 rc = notifyErrorForPendingRequests();
6087 if (rc < 0) {
6088 LOGE("notifyErrorForPendingRequests failed");
6089 pthread_mutex_unlock(&mMutex);
6090 return rc;
6091 }
6092
6093 mFlush = false;
6094
6095 // Start the Streams/Channels
6096 if (restartChannels) {
6097 rc = startAllChannels();
6098 if (rc < 0) {
6099 LOGE("startAllChannels failed");
6100 pthread_mutex_unlock(&mMutex);
6101 return rc;
6102 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006103 if (mChannelHandle) {
6104 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006105 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006106 if (rc < 0) {
6107 LOGE("start_channel failed");
6108 pthread_mutex_unlock(&mMutex);
6109 return rc;
6110 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006111 }
6112 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006113 pthread_mutex_unlock(&mMutex);
6114
6115 return 0;
6116}
6117
6118/*===========================================================================
6119 * FUNCTION : flushPerf
6120 *
6121 * DESCRIPTION: This is the performance optimization version of flush that does
6122 * not use stream off, rather flushes the system
6123 *
6124 * PARAMETERS :
6125 *
6126 *
6127 * RETURN : 0 : success
6128 * -EINVAL: input is malformed (device is not valid)
6129 * -ENODEV: if the device has encountered a serious error
6130 *==========================================================================*/
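// Flow sketch: send the flush to the backend, wait (bounded by FLUSH_TIMEOUT) for the
// pending buffers to return, let each channel flush() its own resources, report errored
// results for the remaining pending requests, then unblock process_capture_request.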
6131int QCamera3HardwareInterface::flushPerf()
6132{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006133 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 int32_t rc = 0;
6135 struct timespec timeout;
6136 bool timed_wait = false;
6137
6138 pthread_mutex_lock(&mMutex);
6139 mFlushPerf = true;
6140 mPendingBuffersMap.numPendingBufsAtFlush =
6141 mPendingBuffersMap.get_num_overall_buffers();
6142 LOGD("Calling flush. Wait for %d buffers to return",
6143 mPendingBuffersMap.numPendingBufsAtFlush);
6144
6145 /* send the flush event to the backend */
6146 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6147 if (rc < 0) {
6148 LOGE("Error in flush: IOCTL failure");
6149 mFlushPerf = false;
6150 pthread_mutex_unlock(&mMutex);
6151 return -ENODEV;
6152 }
6153
6154 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6155 LOGD("No pending buffers in HAL, return flush");
6156 mFlushPerf = false;
6157 pthread_mutex_unlock(&mMutex);
6158 return rc;
6159 }
6160
6161 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006162 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006163 if (rc < 0) {
6164 LOGE("Error reading the monotonic clock, cannot use timed wait");
6165 } else {
6166 timeout.tv_sec += FLUSH_TIMEOUT;
6167 timed_wait = true;
6168 }
6169
6170 //Block on conditional variable
6171 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6172 LOGD("Waiting on mBuffersCond");
6173 if (!timed_wait) {
6174 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6175 if (rc != 0) {
6176 LOGE("pthread_cond_wait failed due to rc = %s",
6177 strerror(rc));
6178 break;
6179 }
6180 } else {
6181 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6182 if (rc != 0) {
6183 LOGE("pthread_cond_timedwait failed due to rc = %s",
6184 strerror(rc));
6185 break;
6186 }
6187 }
6188 }
6189 if (rc != 0) {
6190 mFlushPerf = false;
6191 pthread_mutex_unlock(&mMutex);
6192 return -ENODEV;
6193 }
6194
6195 LOGD("Received buffers, now safe to return them");
6196
6197 //make sure the channels handle flush
6198 //currently only required for the picture channel to release snapshot resources
6199 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6200 it != mStreamInfo.end(); it++) {
6201 QCamera3Channel *channel = (*it)->channel;
6202 if (channel) {
6203 rc = channel->flush();
6204 if (rc) {
6205 LOGE("Flushing the channels failed with error %d", rc);
6206 // even though the channel flush failed we need to continue and
6207 // return the buffers we have to the framework; however, the return
6208 // value will be an error
6209 rc = -ENODEV;
6210 }
6211 }
6212 }
6213
6214 /* notify the frameworks and send errored results */
6215 rc = notifyErrorForPendingRequests();
6216 if (rc < 0) {
6217 LOGE("notifyErrorForPendingRequests failed");
6218 pthread_mutex_unlock(&mMutex);
6219 return rc;
6220 }
6221
6222 //unblock process_capture_request
6223 mPendingLiveRequest = 0;
6224 unblockRequestIfNecessary();
6225
6226 mFlushPerf = false;
6227 pthread_mutex_unlock(&mMutex);
6228 LOGD ("Flush Operation complete. rc = %d", rc);
6229 return rc;
6230}
6231
6232/*===========================================================================
6233 * FUNCTION : handleCameraDeviceError
6234 *
6235 * DESCRIPTION: This function calls internal flush and notifies the error to
6236 * framework and updates the state variable.
6237 *
6238 * PARAMETERS : None
6239 *
6240 * RETURN : NO_ERROR on Success
6241 * Error code on failure
6242 *==========================================================================*/
6243int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6244{
6245 int32_t rc = NO_ERROR;
6246
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006247 {
6248 Mutex::Autolock lock(mFlushLock);
6249 pthread_mutex_lock(&mMutex);
6250 if (mState != ERROR) {
6251 //if mState != ERROR, nothing to be done
6252 pthread_mutex_unlock(&mMutex);
6253 return NO_ERROR;
6254 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006255 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006256
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006257 rc = flush(false /* restart channels */);
6258 if (NO_ERROR != rc) {
6259 LOGE("internal flush to handle mState = ERROR failed");
6260 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006261
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006262 pthread_mutex_lock(&mMutex);
6263 mState = DEINIT;
6264 pthread_mutex_unlock(&mMutex);
6265 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006266
6267 camera3_notify_msg_t notify_msg;
6268 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6269 notify_msg.type = CAMERA3_MSG_ERROR;
6270 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6271 notify_msg.message.error.error_stream = NULL;
6272 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006273 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006274
6275 return rc;
6276}
6277
6278/*===========================================================================
6279 * FUNCTION : captureResultCb
6280 *
6281 * DESCRIPTION: Callback handler for all capture result
6282 * (streams, as well as metadata)
6283 *
6284 * PARAMETERS :
6285 * @metadata_buf : metadata information (NULL if this is a buffer callback)
6286 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
6287 * @frame_number : frame number of the request; @isInputBuffer : true for input buffer callbacks
6288 *
6289 * RETURN : NONE
6290 *==========================================================================*/
6291void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6292 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6293{
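    // Dispatch: batched metadata goes through handleBatchMetadata(); non-batched
    // metadata, input-buffer completions and output buffers are handled under mMutex by
    // their respective *WithLock helpers.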
6294 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006295 pthread_mutex_lock(&mMutex);
6296 uint8_t batchSize = mBatchSize;
6297 pthread_mutex_unlock(&mMutex);
6298 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006299 handleBatchMetadata(metadata_buf,
6300 true /* free_and_bufdone_meta_buf */);
6301 } else { /* mBatchSize = 0 */
6302 hdrPlusPerfLock(metadata_buf);
6303 pthread_mutex_lock(&mMutex);
6304 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006305 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006306 true /* last urgent frame of batch metadata */,
6307 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006308 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006309 pthread_mutex_unlock(&mMutex);
6310 }
6311 } else if (isInputBuffer) {
6312 pthread_mutex_lock(&mMutex);
6313 handleInputBufferWithLock(frame_number);
6314 pthread_mutex_unlock(&mMutex);
6315 } else {
6316 pthread_mutex_lock(&mMutex);
6317 handleBufferWithLock(buffer, frame_number);
6318 pthread_mutex_unlock(&mMutex);
6319 }
6320 return;
6321}
6322
6323/*===========================================================================
6324 * FUNCTION : getReprocessibleOutputStreamId
6325 *
6326 * DESCRIPTION: Get source output stream id for the input reprocess stream
6327 * based on size and format, which would be the largest
6328 * output stream if an input stream exists.
6329 *
6330 * PARAMETERS :
6331 * @id : return the stream id if found
6332 *
6333 * RETURN : int32_t type of status
6334 * NO_ERROR -- success
6335 * non-zero failure code
6336 *==========================================================================*/
6337int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6338{
6339 /* check if there is any output or bidirectional stream with the same size and
6340 format, and return that stream */
6341 if ((mInputStreamInfo.dim.width > 0) &&
6342 (mInputStreamInfo.dim.height > 0)) {
6343 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6344 it != mStreamInfo.end(); it++) {
6345
6346 camera3_stream_t *stream = (*it)->stream;
6347 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6348 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6349 (stream->format == mInputStreamInfo.format)) {
6350 // Usage flag for an input stream and the source output stream
6351 // may be different.
6352 LOGD("Found reprocessible output stream! %p", *it);
6353 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6354 stream->usage, mInputStreamInfo.usage);
6355
6356 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6357 if (channel != NULL && channel->mStreams[0]) {
6358 id = channel->mStreams[0]->getMyServerID();
6359 return NO_ERROR;
6360 }
6361 }
6362 }
6363 } else {
6364 LOGD("No input stream, so no reprocessible output stream");
6365 }
6366 return NAME_NOT_FOUND;
6367}
6368
6369/*===========================================================================
6370 * FUNCTION : lookupFwkName
6371 *
6372 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6373 * make sure the parameter is correctly propagated
6374 *
6375 * PARAMETERS :
6376 * @arr : map between the two enums
6377 * @len : len of the map
6378 * @hal_name : name of the hal_parm to map
6379 *
6380 * RETURN : int type of status
6381 * fwk_name -- success
6382 * non-zero failure code
6383 *==========================================================================*/
6384template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6385 size_t len, halType hal_name)
6386{
6387
6388 for (size_t i = 0; i < len; i++) {
6389 if (arr[i].hal_name == hal_name) {
6390 return arr[i].fwk_name;
6391 }
6392 }
6393
6394 /* Not finding a matching framework type is not necessarily
6395 * an error case. This happens when mm-camera supports more attributes
6396 * than the frameworks do */
6397 LOGH("Cannot find matching framework type");
6398 return NAME_NOT_FOUND;
6399}
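// Usage sketch (entry values hypothetical): with a map whose entries pair a backend
// value (.hal_name) with a framework value (.fwk_name), e.g. CAM_EFFECT_MODE_OFF ->
// ANDROID_CONTROL_EFFECT_MODE_OFF, lookupFwkName(arr, len, CAM_EFFECT_MODE_OFF) returns
// ANDROID_CONTROL_EFFECT_MODE_OFF, or NAME_NOT_FOUND when no entry matches.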
6400
6401/*===========================================================================
6402 * FUNCTION : lookupHalName
6403 *
6404 * DESCRIPTION: In case the enum is not the same in fwk and backend,
6405 * make sure the parameter is correctly propagated
6406 *
6407 * PARAMETERS :
6408 * @arr : map between the two enums
6409 * @len : len of the map
6410 * @fwk_name : name of the framework parameter to map
6411 *
6412 * RETURN : int32_t type of status
6413 * hal_name -- success
6414 * non-zero failure code
6415 *==========================================================================*/
6416template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6417 size_t len, fwkType fwk_name)
6418{
6419 for (size_t i = 0; i < len; i++) {
6420 if (arr[i].fwk_name == fwk_name) {
6421 return arr[i].hal_name;
6422 }
6423 }
6424
6425 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6426 return NAME_NOT_FOUND;
6427}
6428
6429/*===========================================================================
6430 * FUNCTION : lookupProp
6431 *
6432 * DESCRIPTION: lookup a value by its name
6433 *
6434 * PARAMETERS :
6435 * @arr : map between the two enums
6436 * @len : size of the map
6437 * @name : name to be looked up
6438 *
6439 * RETURN : Value if found
6440 * CAM_CDS_MODE_MAX if not found
6441 *==========================================================================*/
6442template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6443 size_t len, const char *name)
6444{
6445 if (name) {
6446 for (size_t i = 0; i < len; i++) {
6447 if (!strcmp(arr[i].desc, name)) {
6448 return arr[i].val;
6449 }
6450 }
6451 }
6452 return CAM_CDS_MODE_MAX;
6453}
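// Usage sketch (table contents hypothetical): lookupProp(arr, len, "Auto") returns the
// cam_cds_mode_type_t value of the entry whose .desc equals "Auto", or CAM_CDS_MODE_MAX
// when the name is NULL or not found.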
6454
6455/*===========================================================================
6456 * FUNCTION   : translateFromHalMetadata
6457 * DESCRIPTION: Translate the HAL/backend metadata buffer into the framework camera_metadata_t format
6458 *
6459 * PARAMETERS :
6460 * @metadata : metadata information from callback
6461 * @timestamp: metadata buffer timestamp
6462 * @request_id: request id
6463 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006464 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006465 * @DevCamDebug_meta_enable: whether DevCamDebug metadata is enabled
Thierry Strudel3d639192016-09-09 11:52:26 -07006467 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006468 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6469 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006470 *
6471 * RETURN : camera_metadata_t*
6472 * metadata in a format specified by fwk
6473 *==========================================================================*/
6474camera_metadata_t*
6475QCamera3HardwareInterface::translateFromHalMetadata(
6476 metadata_buffer_t *metadata,
6477 nsecs_t timestamp,
6478 int32_t request_id,
6479 const CameraMetadata& jpegMetadata,
6480 uint8_t pipeline_depth,
6481 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006482 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006483 /* DevCamDebug metadata translateFromHalMetadata argument */
6484 uint8_t DevCamDebug_meta_enable,
6485 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006486 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006487 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006488 bool lastMetadataInBatch,
6489 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006490{
6491 CameraMetadata camMetadata;
6492 camera_metadata_t *resultMetadata;
6493
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006494 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006495 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6496 * Timestamp is needed because it's used for shutter notify calculation.
6497 */
6498 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6499 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006500 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006501 }
6502
Thierry Strudel3d639192016-09-09 11:52:26 -07006503 if (jpegMetadata.entryCount())
6504 camMetadata.append(jpegMetadata);
6505
6506 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6507 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6508 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6509 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006510 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006511 if (mBatchSize == 0) {
6512 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6513 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6514 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006515
Samuel Ha68ba5172016-12-15 18:41:12 -08006516 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6517 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6518 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6519 // DevCamDebug metadata translateFromHalMetadata AF
6520 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6521 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6522 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6523 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6524 }
6525 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6526 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6527 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6528 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6529 }
6530 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6531 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6532 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6533 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6534 }
6535 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6536 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6537 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6538 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6539 }
6540 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6541 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6542 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6543 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6544 }
6545 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6546 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6547 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6548 *DevCamDebug_af_monitor_pdaf_target_pos;
6549 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6550 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6551 }
6552 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6553 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6554 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6555 *DevCamDebug_af_monitor_pdaf_confidence;
6556 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6557 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6558 }
6559 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6560 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6561 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6562 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6563 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6564 }
6565 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6566 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6567 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6568 *DevCamDebug_af_monitor_tof_target_pos;
6569 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6570 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6571 }
6572 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6573 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6574 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6575 *DevCamDebug_af_monitor_tof_confidence;
6576 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6577 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6578 }
6579 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6580 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6581 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6582 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6583 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6584 }
6585 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6586 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6587 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6588 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6589 &fwk_DevCamDebug_af_monitor_type_select, 1);
6590 }
6591 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6592 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6593 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6594 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6595 &fwk_DevCamDebug_af_monitor_refocus, 1);
6596 }
6597 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6598 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6599 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6600 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6601 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6602 }
6603 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6604 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6605 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6606 *DevCamDebug_af_search_pdaf_target_pos;
6607 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6608 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6609 }
6610 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6611 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6612 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6613 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6614 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6615 }
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6617 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6618 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6619 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6620 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6621 }
6622 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6623 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6624 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6625 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6626 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6627 }
6628 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6629 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6630 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6631 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6632 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6633 }
6634 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6635 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6636 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6637 *DevCamDebug_af_search_tof_target_pos;
6638 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6639 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6642 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6643 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6644 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6645 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6646 }
6647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6648 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6649 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6650 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6651 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6652 }
6653 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6654 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6655 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6656 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6657 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6660 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6661 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6662 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6663 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6664 }
6665 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6666 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6667 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6668 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6669 &fwk_DevCamDebug_af_search_type_select, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6672 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6673 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6674 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6675 &fwk_DevCamDebug_af_search_next_pos, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6678 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6679 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6680 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6681 &fwk_DevCamDebug_af_search_target_pos, 1);
6682 }
6683 // DevCamDebug metadata translateFromHalMetadata AEC
6684 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6685 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6686 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6687 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6690 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6691 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6692 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6693 }
6694 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6695 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6696 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6697 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6698 }
6699 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6700 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6701 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6702 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6703 }
6704 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6705 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6706 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6707 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6708 }
6709 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6710 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6711 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6712 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6715 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6716 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6717 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6718 }
6719 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6720 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6721 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6722 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6723 }
Samuel Ha34229982017-02-17 13:51:11 -08006724 // DevCamDebug metadata translateFromHalMetadata zzHDR
6725 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6726 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6727 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6728 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6731 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006732 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006733 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6734 }
6735 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6736 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6737 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6738 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6739 }
6740 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6741 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006742 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006743 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6744 }
6745 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6746 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6747 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6748 *DevCamDebug_aec_hdr_sensitivity_ratio;
6749 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6750 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6751 }
6752 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6753 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6754 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6755 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6756 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6757 }
6758 // DevCamDebug metadata translateFromHalMetadata ADRC
6759 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6760 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6761 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6762 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6763 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6764 }
6765 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6766 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6767 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6768 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6769 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6770 }
6771 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6772 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6773 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6774 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6775 }
6776 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6777 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6778 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6779 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6780 }
6781 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6782 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6783 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6784 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6785 }
6786 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6787 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6788 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6789 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6790 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006791 // DevCamDebug metadata translateFromHalMetadata AWB
6792 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6793 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6794 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6795 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6796 }
6797 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6798 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6799 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6800 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6801 }
6802 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6803 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6804 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6805 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6808 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6809 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6810 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6813 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6814 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6815 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6816 }
6817 }
6818 // atrace_end(ATRACE_TAG_ALWAYS);
6819
Thierry Strudel3d639192016-09-09 11:52:26 -07006820 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6821 int64_t fwk_frame_number = *frame_number;
6822 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6823 }
6824
6825 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6826 int32_t fps_range[2];
6827 fps_range[0] = (int32_t)float_range->min_fps;
6828 fps_range[1] = (int32_t)float_range->max_fps;
6829 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6830 fps_range, 2);
6831 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6832 fps_range[0], fps_range[1]);
6833 }
6834
6835 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6836 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6837 }
6838
6839 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6840            int val = lookupFwkName(SCENE_MODES_MAP,
6841 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6842 *sceneMode);
6843 if (NAME_NOT_FOUND != val) {
6844 uint8_t fwkSceneMode = (uint8_t)val;
6845 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6846 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6847 fwkSceneMode);
6848 }
6849 }
6850
6851 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6852 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6853 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6854 }
6855
6856 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6857 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6858 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6859 }
6860
6861 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6862 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6863 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6864 }
6865
6866 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6867 CAM_INTF_META_EDGE_MODE, metadata) {
6868 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6869 }
6870
6871 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6872 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6873 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6874 }
6875
6876 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6877 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6878 }
6879
6880 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6881 if (0 <= *flashState) {
6882 uint8_t fwk_flashState = (uint8_t) *flashState;
6883 if (!gCamCapability[mCameraId]->flash_available) {
6884 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6885 }
6886 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6887 }
6888 }
6889
6890 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6891 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6892 if (NAME_NOT_FOUND != val) {
6893 uint8_t fwk_flashMode = (uint8_t)val;
6894 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6895 }
6896 }
6897
6898 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6899 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6900 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6901 }
6902
6903 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6904 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6905 }
6906
6907 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6908 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6909 }
6910
6911 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6912 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6913 }
6914
6915 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6916 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6917 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6918 }
6919
6920 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6921 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6922 LOGD("fwk_videoStab = %d", fwk_videoStab);
6923 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6924 } else {
6925        // Regardless of whether video stabilization is supported, CTS expects the EIS result
6926        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6927 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6928 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006929 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006930 }
6931
6932 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6933 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6934 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6935 }
6936
6937 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6938 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6939 }
6940
Thierry Strudel3d639192016-09-09 11:52:26 -07006941 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6942 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006943 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006944
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006945 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6946 gCamCapability[mCameraId]->color_arrangement);
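        // adjustBlackLevelForCFA reorders the applied per-channel dynamic black levels into
        // RGGB order based on the sensor's color filter arrangement (see the log below).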
Thierry Strudel3d639192016-09-09 11:52:26 -07006947
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006948 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006949 blackLevelAppliedPattern->cam_black_level[0],
6950 blackLevelAppliedPattern->cam_black_level[1],
6951 blackLevelAppliedPattern->cam_black_level[2],
6952 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006953 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6954 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006955
6956#ifndef USE_HAL_3_3
6957 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306958        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006959        // depth space, i.e. divide by 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05306960 fwk_blackLevelInd[0] /= 16.0;
6961 fwk_blackLevelInd[1] /= 16.0;
6962 fwk_blackLevelInd[2] /= 16.0;
6963 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006964 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6965 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006966#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006967 }
6968
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006969#ifndef USE_HAL_3_3
6970 // Fixed whitelevel is used by ISP/Sensor
6971 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6972 &gCamCapability[mCameraId]->white_level, 1);
6973#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006974
6975 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6976 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6977 int32_t scalerCropRegion[4];
6978 scalerCropRegion[0] = hScalerCropRegion->left;
6979 scalerCropRegion[1] = hScalerCropRegion->top;
6980 scalerCropRegion[2] = hScalerCropRegion->width;
6981 scalerCropRegion[3] = hScalerCropRegion->height;
6982
6983 // Adjust crop region from sensor output coordinate system to active
6984 // array coordinate system.
6985 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6986 scalerCropRegion[2], scalerCropRegion[3]);
6987
6988 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6989 }
6990
6991 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6992 LOGD("sensorExpTime = %lld", *sensorExpTime);
6993 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6994 }
6995
6996    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6997            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6998        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6999        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7000 }
7001
7002 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7003 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7004 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7005 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7006 sensorRollingShutterSkew, 1);
7007 }
7008
7009 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7010 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7011 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7012
7013 //calculate the noise profile based on sensitivity
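        // Each channel reports the same (S, O) pair; per the Camera3 sensor noise model the
        // per-pixel variance is approximately S * x + O for a pixel value x.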
7014 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7015 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7016 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7017 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7018 noise_profile[i] = noise_profile_S;
7019 noise_profile[i+1] = noise_profile_O;
7020 }
7021 LOGD("noise model entry (S, O) is (%f, %f)",
7022 noise_profile_S, noise_profile_O);
7023 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7024 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7025 }
7026
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007027#ifndef USE_HAL_3_3
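    // Post-RAW sensitivity boost: start from the ISP sensitivity (100 means no boost) and
    // fold in the post-stats sensitivity factor before reporting it to the framework.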
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007028 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007029 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007030 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007031 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007032 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7033 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7034 }
7035 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007036#endif
7037
Thierry Strudel3d639192016-09-09 11:52:26 -07007038 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7039 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7040 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7041 }
7042
7043 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7044 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7045 *faceDetectMode);
7046 if (NAME_NOT_FOUND != val) {
7047 uint8_t fwk_faceDetectMode = (uint8_t)val;
7048 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7049
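            // Face statistics are only translated when face detection is not OFF; face
            // rectangles and landmarks are remapped from sensor to active-array coordinates.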
7050 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7051 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7052 CAM_INTF_META_FACE_DETECTION, metadata) {
7053 uint8_t numFaces = MIN(
7054 faceDetectionInfo->num_faces_detected, MAX_ROI);
7055 int32_t faceIds[MAX_ROI];
7056 uint8_t faceScores[MAX_ROI];
7057 int32_t faceRectangles[MAX_ROI * 4];
7058 int32_t faceLandmarks[MAX_ROI * 6];
7059 size_t j = 0, k = 0;
7060
7061 for (size_t i = 0; i < numFaces; i++) {
7062 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7063                    // Adjust the face rectangle from the sensor output coordinate system to
7064                    // the active array coordinate system.
7065 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7066 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7067 rect.width, rect.height);
7068
7069 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7070 faceRectangles+j, -1);
7071
Jason Lee8ce36fa2017-04-19 19:40:37 -07007072 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7073 "bottom-right (%d, %d)",
7074 faceDetectionInfo->frame_id, i,
7075 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7076 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7077
Thierry Strudel3d639192016-09-09 11:52:26 -07007078 j+= 4;
7079 }
7080 if (numFaces <= 0) {
7081 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7082 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7083 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7084 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7085 }
7086
7087 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7088 numFaces);
7089 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7090 faceRectangles, numFaces * 4U);
7091 if (fwk_faceDetectMode ==
7092 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7093 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7094 CAM_INTF_META_FACE_LANDMARK, metadata) {
7095
7096 for (size_t i = 0; i < numFaces; i++) {
7097                            // Map the landmark coordinates from the sensor output coordinate
7098                            // system to the active array coordinate system.
7099 mCropRegionMapper.toActiveArray(
7100 landmarks->face_landmarks[i].left_eye_center.x,
7101 landmarks->face_landmarks[i].left_eye_center.y);
7102 mCropRegionMapper.toActiveArray(
7103 landmarks->face_landmarks[i].right_eye_center.x,
7104 landmarks->face_landmarks[i].right_eye_center.y);
7105 mCropRegionMapper.toActiveArray(
7106 landmarks->face_landmarks[i].mouth_center.x,
7107 landmarks->face_landmarks[i].mouth_center.y);
7108
7109 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007110
7111 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7112 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7113 faceDetectionInfo->frame_id, i,
7114 faceLandmarks[k + LEFT_EYE_X],
7115 faceLandmarks[k + LEFT_EYE_Y],
7116 faceLandmarks[k + RIGHT_EYE_X],
7117 faceLandmarks[k + RIGHT_EYE_Y],
7118 faceLandmarks[k + MOUTH_X],
7119 faceLandmarks[k + MOUTH_Y]);
7120
Thierry Strudel04e026f2016-10-10 11:27:36 -07007121 k+= TOTAL_LANDMARK_INDICES;
7122 }
7123 } else {
7124 for (size_t i = 0; i < numFaces; i++) {
7125 setInvalidLandmarks(faceLandmarks+k);
7126 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007127 }
7128 }
7129
Jason Lee49619db2017-04-13 12:07:22 -07007130 for (size_t i = 0; i < numFaces; i++) {
7131 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7132
7133 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7134 faceDetectionInfo->frame_id, i, faceIds[i]);
7135 }
7136
Thierry Strudel3d639192016-09-09 11:52:26 -07007137 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7138 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7139 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007140 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007141 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7142 CAM_INTF_META_FACE_BLINK, metadata) {
7143 uint8_t detected[MAX_ROI];
7144 uint8_t degree[MAX_ROI * 2];
7145 for (size_t i = 0; i < numFaces; i++) {
7146 detected[i] = blinks->blink[i].blink_detected;
7147 degree[2 * i] = blinks->blink[i].left_blink;
7148 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007149
Jason Lee49619db2017-04-13 12:07:22 -07007150 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7151 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7152 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7153 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007154 }
7155 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7156 detected, numFaces);
7157 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7158 degree, numFaces * 2);
7159 }
7160 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7161 CAM_INTF_META_FACE_SMILE, metadata) {
7162 uint8_t degree[MAX_ROI];
7163 uint8_t confidence[MAX_ROI];
7164 for (size_t i = 0; i < numFaces; i++) {
7165 degree[i] = smiles->smile[i].smile_degree;
7166 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007167
Jason Lee49619db2017-04-13 12:07:22 -07007168 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7169 "smile_degree=%d, smile_score=%d",
7170 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007171 }
7172 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7173 degree, numFaces);
7174 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7175 confidence, numFaces);
7176 }
7177 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7178 CAM_INTF_META_FACE_GAZE, metadata) {
7179 int8_t angle[MAX_ROI];
7180 int32_t direction[MAX_ROI * 3];
7181 int8_t degree[MAX_ROI * 2];
7182 for (size_t i = 0; i < numFaces; i++) {
7183 angle[i] = gazes->gaze[i].gaze_angle;
7184 direction[3 * i] = gazes->gaze[i].updown_dir;
7185 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7186 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7187 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7188 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007189
7190 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7191 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7192 "left_right_gaze=%d, top_bottom_gaze=%d",
7193 faceDetectionInfo->frame_id, i, angle[i],
7194 direction[3 * i], direction[3 * i + 1],
7195 direction[3 * i + 2],
7196 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007197 }
7198 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7199 (uint8_t *)angle, numFaces);
7200 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7201 direction, numFaces * 3);
7202 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7203 (uint8_t *)degree, numFaces * 2);
7204 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007205 }
7206 }
7207 }
7208 }
7209
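    // Histogram stats: when histogram mode is ON, publish the bin count and one channel's
    // histogram buffer (a Bayer GR/GB/B/R plane or the YUV buffer) through the
    // NEXUS_EXPERIMENTAL_2017 vendor tags.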
7210 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7211 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007212 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007213 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007214 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007215
Shuzhen Wang14415f52016-11-16 18:26:18 -08007216 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7217 histogramBins = *histBins;
7218 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7219 }
7220
7221 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007222 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7223 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007224 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007225
7226 switch (stats_data->type) {
7227 case CAM_HISTOGRAM_TYPE_BAYER:
7228 switch (stats_data->bayer_stats.data_type) {
7229 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007230 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7231 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007232 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007233 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7234 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007235 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007236 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7237 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007238 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007239 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007240 case CAM_STATS_CHANNEL_R:
7241 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007242 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7243 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244 }
7245 break;
7246 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007247 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007248 break;
7249 }
7250
Shuzhen Wang14415f52016-11-16 18:26:18 -08007251 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007252 }
7253 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007254 }
7255
7256 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7257 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7258 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7259 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7260 }
7261
7262 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7263 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7264 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7265 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7266 }
7267
7268 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7269 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7270 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7271 CAM_MAX_SHADING_MAP_HEIGHT);
7272 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7273 CAM_MAX_SHADING_MAP_WIDTH);
7274 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7275 lensShadingMap->lens_shading, 4U * map_width * map_height);
7276 }
7277
7278 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7279 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7280 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7281 }
7282
7283 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7284        // Translate CAM_INTF_META_TONEMAP_CURVES into the framework tonemap curves
7285        /* ch0 = G, ch1 = B, ch2 = R */
7286 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7287 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7288 tonemap->tonemap_points_cnt,
7289 CAM_MAX_TONEMAP_CURVE_SIZE);
7290 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7291 }
7292
7293 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7294 &tonemap->curves[0].tonemap_points[0][0],
7295 tonemap->tonemap_points_cnt * 2);
7296
7297 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7298 &tonemap->curves[1].tonemap_points[0][0],
7299 tonemap->tonemap_points_cnt * 2);
7300
7301 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7302 &tonemap->curves[2].tonemap_points[0][0],
7303 tonemap->tonemap_points_cnt * 2);
7304 }
7305
7306 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7307 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7308 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7309 CC_GAIN_MAX);
7310 }
7311
7312 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7313 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7314 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7315 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7316 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7317 }
7318
7319 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7320 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7321 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7322 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7323 toneCurve->tonemap_points_cnt,
7324 CAM_MAX_TONEMAP_CURVE_SIZE);
7325 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7326 }
7327 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7328 (float*)toneCurve->curve.tonemap_points,
7329 toneCurve->tonemap_points_cnt * 2);
7330 }
7331
7332 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7333 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7334 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7335 predColorCorrectionGains->gains, 4);
7336 }
7337
7338 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7339 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7340 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7341 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7342 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7343 }
7344
7345 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7346 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7347 }
7348
7349 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7350 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7351 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7352 }
7353
7354 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7355 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7356 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7357 }
7358
7359 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7360 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7361 *effectMode);
7362 if (NAME_NOT_FOUND != val) {
7363 uint8_t fwk_effectMode = (uint8_t)val;
7364 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7365 }
7366 }
7367
7368 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7369 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7370 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7371 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7372 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7373 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7374 }
7375 int32_t fwk_testPatternData[4];
7376 fwk_testPatternData[0] = testPatternData->r;
7377 fwk_testPatternData[3] = testPatternData->b;
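        // The green slots [1] and [2] of ANDROID_SENSOR_TEST_PATTERN_DATA depend on the
        // sensor's Bayer arrangement, so map the HAL's Gr/Gb values accordingly below.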
7378 switch (gCamCapability[mCameraId]->color_arrangement) {
7379 case CAM_FILTER_ARRANGEMENT_RGGB:
7380 case CAM_FILTER_ARRANGEMENT_GRBG:
7381 fwk_testPatternData[1] = testPatternData->gr;
7382 fwk_testPatternData[2] = testPatternData->gb;
7383 break;
7384 case CAM_FILTER_ARRANGEMENT_GBRG:
7385 case CAM_FILTER_ARRANGEMENT_BGGR:
7386 fwk_testPatternData[2] = testPatternData->gr;
7387 fwk_testPatternData[1] = testPatternData->gb;
7388 break;
7389 default:
7390 LOGE("color arrangement %d is not supported",
7391 gCamCapability[mCameraId]->color_arrangement);
7392 break;
7393 }
7394 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7395 }
7396
7397 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7398 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7399 }
7400
7401 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7402 String8 str((const char *)gps_methods);
7403 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7404 }
7405
7406 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7407 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7408 }
7409
7410 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7411 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7412 }
7413
7414 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7415 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7416 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7417 }
7418
7419 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7420 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7421 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7422 }
7423
7424 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7425 int32_t fwk_thumb_size[2];
7426 fwk_thumb_size[0] = thumb_size->width;
7427 fwk_thumb_size[1] = thumb_size->height;
7428 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7429 }
7430
7431 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7432 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7433 privateData,
7434 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7435 }
7436
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007437 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007438 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007439 meteringMode, 1);
7440 }
7441
Thierry Strudel54dc9782017-02-15 12:12:10 -08007442 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7443 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7444 LOGD("hdr_scene_data: %d %f\n",
7445 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7446 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7447 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7448 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7449 &isHdr, 1);
7450 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7451 &isHdrConfidence, 1);
7452 }
7453
7454
7455
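    // Tuning metadata blob layout: a version word, five size words (sensor, VFE, CPP, CAC,
    // mod3), then the sensor/VFE/CPP/CAC payloads, each clamped to its *_MAX size. The packed
    // buffer is exposed through the QCAMERA3_TUNING_META_DATA_BLOB vendor tag.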
Thierry Strudel3d639192016-09-09 11:52:26 -07007456 if (metadata->is_tuning_params_valid) {
7457 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7458 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7459 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7460
7461
7462 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7463 sizeof(uint32_t));
7464 data += sizeof(uint32_t);
7465
7466 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7467 sizeof(uint32_t));
7468 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7469 data += sizeof(uint32_t);
7470
7471 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7472 sizeof(uint32_t));
7473 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7474 data += sizeof(uint32_t);
7475
7476 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7477 sizeof(uint32_t));
7478 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7479 data += sizeof(uint32_t);
7480
7481 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7482 sizeof(uint32_t));
7483 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7484 data += sizeof(uint32_t);
7485
7486 metadata->tuning_params.tuning_mod3_data_size = 0;
7487 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7488 sizeof(uint32_t));
7489 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7490 data += sizeof(uint32_t);
7491
7492 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7493 TUNING_SENSOR_DATA_MAX);
7494 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7495 count);
7496 data += count;
7497
7498 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7499 TUNING_VFE_DATA_MAX);
7500 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7501 count);
7502 data += count;
7503
7504 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7505 TUNING_CPP_DATA_MAX);
7506 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7507 count);
7508 data += count;
7509
7510 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7511 TUNING_CAC_DATA_MAX);
7512 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7513 count);
7514 data += count;
7515
7516 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7517 (int32_t *)(void *)tuning_meta_data_blob,
7518 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7519 }
7520
7521 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7522 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7523 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7524 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7525 NEUTRAL_COL_POINTS);
7526 }
7527
7528 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7529 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7530 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7531 }
7532
7533 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7534 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7535 // Adjust crop region from sensor output coordinate system to active
7536 // array coordinate system.
7537 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7538 hAeRegions->rect.width, hAeRegions->rect.height);
7539
7540 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7541 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7542 REGIONS_TUPLE_COUNT);
7543 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7544 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7545 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7546 hAeRegions->rect.height);
7547 }
7548
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007549 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7550 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7551 if (NAME_NOT_FOUND != val) {
7552 uint8_t fwkAfMode = (uint8_t)val;
7553 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7554 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7555 } else {
7556 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7557 val);
7558 }
7559 }
7560
Thierry Strudel3d639192016-09-09 11:52:26 -07007561 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7562 uint8_t fwk_afState = (uint8_t) *afState;
7563 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007564 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007565 }
7566
7567 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7568 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7569 }
7570
7571 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7572 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7573 }
7574
7575 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7576 uint8_t fwk_lensState = *lensState;
7577 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7578 }
7579
Thierry Strudel3d639192016-09-09 11:52:26 -07007580
7581 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007582 uint32_t ab_mode = *hal_ab_mode;
7583 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7584 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7585 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007587 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007588 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007589 if (NAME_NOT_FOUND != val) {
7590 uint8_t fwk_ab_mode = (uint8_t)val;
7591 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7592 }
7593 }
7594
7595 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7596 int val = lookupFwkName(SCENE_MODES_MAP,
7597 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7598 if (NAME_NOT_FOUND != val) {
7599 uint8_t fwkBestshotMode = (uint8_t)val;
7600 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7601 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7602 } else {
7603 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7604 }
7605 }
7606
7607 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7608 uint8_t fwk_mode = (uint8_t) *mode;
7609 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7610 }
7611
7612    /* Constant metadata values to be updated */
7613 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7614 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7615
7616 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7617 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7618
7619 int32_t hotPixelMap[2];
7620 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7621
7622 // CDS
7623 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7624 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7625 }
7626
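    // Video HDR: translate the sensor HDR state to the vendor tag and record on/off
    // transitions in mCurrFeatureState so HDR toggles can be profiled.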
Thierry Strudel04e026f2016-10-10 11:27:36 -07007627 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7628 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007629 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007630 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7631 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7632 } else {
7633 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7634 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007635
7636 if(fwk_hdr != curr_hdr_state) {
7637 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7638 if(fwk_hdr)
7639 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7640 else
7641 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7642 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007643 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7644 }
7645
Thierry Strudel54dc9782017-02-15 12:12:10 -08007646 //binning correction
7647 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7648 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7649 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7650 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7651 }
7652
Thierry Strudel04e026f2016-10-10 11:27:36 -07007653 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007654 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007655 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7656 int8_t is_ir_on = 0;
7657
7658        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7659 if(is_ir_on != curr_ir_state) {
7660 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7661 if(is_ir_on)
7662 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7663 else
7664 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7665 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007666 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007667 }
7668
Thierry Strudel269c81a2016-10-12 12:13:59 -07007669 // AEC SPEED
7670 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7671 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7672 }
7673
7674 // AWB SPEED
7675 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7676 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7677 }
7678
Thierry Strudel3d639192016-09-09 11:52:26 -07007679 // TNR
7680 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7681 uint8_t tnr_enable = tnr->denoise_enable;
7682 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007683 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7684 int8_t is_tnr_on = 0;
7685
7686        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7687 if(is_tnr_on != curr_tnr_state) {
7688 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7689 if(is_tnr_on)
7690 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7691 else
7692 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7693 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007694
7695 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7696 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7697 }
7698
7699 // Reprocess crop data
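    // Only the crop (and ROI map) of the reprocessible output stream is forwarded; if the HAL
    // already reprocessed internally, the full input stream dimensions are reported instead.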
7700 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7701 uint8_t cnt = crop_data->num_of_streams;
7702 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7703            // mm-qcamera-daemon only posts crop_data for streams
7704            // not linked to pproc, so the absence of valid crop metadata is not
7705            // necessarily an error case.
7706 LOGD("No valid crop metadata entries");
7707 } else {
7708 uint32_t reproc_stream_id;
7709 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7710 LOGD("No reprocessible stream found, ignore crop data");
7711 } else {
7712 int rc = NO_ERROR;
7713 Vector<int32_t> roi_map;
7714 int32_t *crop = new int32_t[cnt*4];
7715 if (NULL == crop) {
7716 rc = NO_MEMORY;
7717 }
7718 if (NO_ERROR == rc) {
7719 int32_t streams_found = 0;
7720 for (size_t i = 0; i < cnt; i++) {
7721 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7722 if (pprocDone) {
7723 // HAL already does internal reprocessing,
7724 // either via reprocessing before JPEG encoding,
7725 // or offline postprocessing for pproc bypass case.
7726 crop[0] = 0;
7727 crop[1] = 0;
7728 crop[2] = mInputStreamInfo.dim.width;
7729 crop[3] = mInputStreamInfo.dim.height;
7730 } else {
7731 crop[0] = crop_data->crop_info[i].crop.left;
7732 crop[1] = crop_data->crop_info[i].crop.top;
7733 crop[2] = crop_data->crop_info[i].crop.width;
7734 crop[3] = crop_data->crop_info[i].crop.height;
7735 }
7736 roi_map.add(crop_data->crop_info[i].roi_map.left);
7737 roi_map.add(crop_data->crop_info[i].roi_map.top);
7738 roi_map.add(crop_data->crop_info[i].roi_map.width);
7739 roi_map.add(crop_data->crop_info[i].roi_map.height);
7740 streams_found++;
7741 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7742 crop[0], crop[1], crop[2], crop[3]);
7743 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7744 crop_data->crop_info[i].roi_map.left,
7745 crop_data->crop_info[i].roi_map.top,
7746 crop_data->crop_info[i].roi_map.width,
7747 crop_data->crop_info[i].roi_map.height);
7748 break;
7749
7750 }
7751 }
7752 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7753 &streams_found, 1);
7754 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7755 crop, (size_t)(streams_found * 4));
7756 if (roi_map.array()) {
7757 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7758 roi_map.array(), roi_map.size());
7759 }
7760 }
7761 if (crop) {
7762 delete [] crop;
7763 }
7764 }
7765 }
7766 }
7767
7768 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7769        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7770        // so hardcode the CAC result to OFF mode.
7771 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7772 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7773 } else {
7774 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7775 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7776 *cacMode);
7777 if (NAME_NOT_FOUND != val) {
7778 uint8_t resultCacMode = (uint8_t)val;
7779 // check whether CAC result from CB is equal to Framework set CAC mode
7780 // If not equal then set the CAC mode came in corresponding request
7781 if (fwk_cacMode != resultCacMode) {
7782 resultCacMode = fwk_cacMode;
7783 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007784 //Check if CAC is disabled by property
7785 if (m_cacModeDisabled) {
7786 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7787 }
7788
Thierry Strudel3d639192016-09-09 11:52:26 -07007789 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7790 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7791 } else {
7792 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7793 }
7794 }
7795 }
7796
7797 // Post blob of cam_cds_data through vendor tag.
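    // Only the CDS setting of the reprocessible output stream is forwarded; the override blob
    // always reports a single stream.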
7798 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7799 uint8_t cnt = cdsInfo->num_of_streams;
7800 cam_cds_data_t cdsDataOverride;
7801 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7802 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7803 cdsDataOverride.num_of_streams = 1;
7804 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7805 uint32_t reproc_stream_id;
7806 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7807 LOGD("No reprocessible stream found, ignore cds data");
7808 } else {
7809 for (size_t i = 0; i < cnt; i++) {
7810 if (cdsInfo->cds_info[i].stream_id ==
7811 reproc_stream_id) {
7812 cdsDataOverride.cds_info[0].cds_enable =
7813 cdsInfo->cds_info[i].cds_enable;
7814 break;
7815 }
7816 }
7817 }
7818 } else {
7819 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7820 }
7821 camMetadata.update(QCAMERA3_CDS_INFO,
7822 (uint8_t *)&cdsDataOverride,
7823 sizeof(cam_cds_data_t));
7824 }
7825
7826 // Ldaf calibration data
7827 if (!mLdafCalibExist) {
7828 IF_META_AVAILABLE(uint32_t, ldafCalib,
7829 CAM_INTF_META_LDAF_EXIF, metadata) {
7830 mLdafCalibExist = true;
7831 mLdafCalib[0] = ldafCalib[0];
7832 mLdafCalib[1] = ldafCalib[1];
7833 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7834 ldafCalib[0], ldafCalib[1]);
7835 }
7836 }
7837
7838    // EXIF debug data through vendor tag
7839 /*
7840 * Mobicat Mask can assume 3 values:
7841 * 1 refers to Mobicat data,
7842 * 2 refers to Stats Debug and Exif Debug Data
7843 * 3 refers to Mobicat and Stats Debug Data
7844 * We want to make sure that we are sending Exif debug data
7845 * only when Mobicat Mask is 2.
7846 */
7847 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7848 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7849 (uint8_t *)(void *)mExifParams.debug_params,
7850 sizeof(mm_jpeg_debug_exif_params_t));
7851 }
7852
7853    // Reprocess and DDM debug data through vendor tag
7854 cam_reprocess_info_t repro_info;
7855 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
7856    IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7857            CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
7858        memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
7859    }
7860    IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7861            CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
7862        memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
7863    }
7864    IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7865            CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
7866        memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
7867    }
7868    IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7869            CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
7870        memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
7871    }
7872    IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7873            CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
7874        memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
7875    }
7876    IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
7877        memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
7878    }
7879    IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7880            CAM_INTF_PARM_ROTATION, metadata) {
7881        memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
7882    }
7883    IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7884        memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7885    }
7886    IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7887        memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7888    }
7889    camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7890            (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
7891
7892    // INSTANT AEC MODE
7893 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7894 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7895 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7896 }
7897
7898    // AF scene change
7899 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7900 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7901 }
7902
7903    // Enable ZSL
7904 if (enableZsl != nullptr) {
7905 uint8_t value = *enableZsl ?
7906 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7907 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7908 }
7909
7910    // OIS Data
7911 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
7912 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
7913 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
7914 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
7915 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
7916 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
7917 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
7918 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
7919 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
7920 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
7921 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
7922 }
7923
7924    resultMetadata = camMetadata.release();
7925 return resultMetadata;
7926}
7927
7928/*===========================================================================
7929 * FUNCTION : saveExifParams
7930 *
7931 * DESCRIPTION: Save 3A and stats EXIF debug parameters from the metadata callback into mExifParams
7932 *
7933 * PARAMETERS :
7934 * @metadata : metadata information from callback
7935 *
7936 * RETURN : none
7937 *
7938 *==========================================================================*/
7939void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7940{
7941 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7942 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7943 if (mExifParams.debug_params) {
7944 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7945 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7946 }
7947 }
7948 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7949 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7950 if (mExifParams.debug_params) {
7951 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7952 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7953 }
7954 }
7955 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7956 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7957 if (mExifParams.debug_params) {
7958 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7959 mExifParams.debug_params->af_debug_params_valid = TRUE;
7960 }
7961 }
7962 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7963 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7964 if (mExifParams.debug_params) {
7965 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7966 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7967 }
7968 }
7969 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7970 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7971 if (mExifParams.debug_params) {
7972 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7973 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7974 }
7975 }
7976 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7977 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7978 if (mExifParams.debug_params) {
7979 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7980 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7981 }
7982 }
7983 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7984 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7985 if (mExifParams.debug_params) {
7986 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7987 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7988 }
7989 }
7990 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7991 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7992 if (mExifParams.debug_params) {
7993 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7994 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7995 }
7996 }
7997}
7998
7999/*===========================================================================
8000 * FUNCTION : get3AExifParams
8001 *
8002 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8003 *
8004 * PARAMETERS : none
8005 *
8006 *
8007 * RETURN : mm_jpeg_exif_params_t
8008 *
8009 *==========================================================================*/
8010mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8011{
8012 return mExifParams;
8013}
8014
8015/*===========================================================================
8016 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8017 *
8018 * DESCRIPTION: Translate urgent (partial result) metadata from the backend callback into framework result metadata
8019 *
8020 * PARAMETERS :
8021 * @metadata : metadata information from callback
8022 *   @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8023 * urgent metadata in a batch. Always true for
8024 * non-batch mode.
8025 *
8026 * RETURN : camera_metadata_t*
8027 * metadata in a format specified by fwk
8028 *==========================================================================*/
8029camera_metadata_t*
8030QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
8031                                (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
8032{
8033 CameraMetadata camMetadata;
8034 camera_metadata_t *resultMetadata;
8035
8036    if (!lastUrgentMetadataInBatch) {
8037 /* In batch mode, use empty metadata if this is not the last in batch
8038 */
8039 resultMetadata = allocate_camera_metadata(0, 0);
8040 return resultMetadata;
8041 }
8042
8043 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8044 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8045 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8046 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8047 }
8048
8049 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8050 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8051 &aecTrigger->trigger, 1);
8052 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8053 &aecTrigger->trigger_id, 1);
8054 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8055 aecTrigger->trigger);
8056 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8057 aecTrigger->trigger_id);
8058 }
8059
8060 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8061 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8062 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8063 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8064 }
8065
8066    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8067 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8068 &af_trigger->trigger, 1);
8069 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8070 af_trigger->trigger);
8071 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8072 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8073 af_trigger->trigger_id);
8074 }
8075
8076    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8077 /*af regions*/
8078 int32_t afRegions[REGIONS_TUPLE_COUNT];
8079 // Adjust crop region from sensor output coordinate system to active
8080 // array coordinate system.
8081 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8082 hAfRegions->rect.width, hAfRegions->rect.height);
8083
8084 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8085 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8086 REGIONS_TUPLE_COUNT);
8087 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8088 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8089 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8090 hAfRegions->rect.height);
8091 }
8092
8093    // AF region confidence
8094 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8095 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8096 }
8097
8098    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8099 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8100 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8101 if (NAME_NOT_FOUND != val) {
8102 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8103 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8104 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8105 } else {
8106 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8107 }
8108 }
8109
8110 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8111 uint32_t aeMode = CAM_AE_MODE_MAX;
8112 int32_t flashMode = CAM_FLASH_MODE_MAX;
8113 int32_t redeye = -1;
8114 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8115 aeMode = *pAeMode;
8116 }
8117 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8118 flashMode = *pFlashMode;
8119 }
8120 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8121 redeye = *pRedeye;
8122 }
8123
8124 if (1 == redeye) {
8125 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8126 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8127 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8128 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8129 flashMode);
8130 if (NAME_NOT_FOUND != val) {
8131 fwk_aeMode = (uint8_t)val;
8132 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8133 } else {
8134 LOGE("Unsupported flash mode %d", flashMode);
8135 }
8136 } else if (aeMode == CAM_AE_MODE_ON) {
8137 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8138 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8139 } else if (aeMode == CAM_AE_MODE_OFF) {
8140 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8141 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8142    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8143 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8144 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8145    } else {
8146 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8147 "flashMode:%d, aeMode:%u!!!",
8148 redeye, flashMode, aeMode);
8149 }
8150    if (mInstantAEC) {
8151        // Increment frame index count until a bound is reached for instant AEC.
8152 mInstantAecFrameIdxCount++;
8153 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8154 CAM_INTF_META_AEC_INFO, metadata) {
8155 LOGH("ae_params->settled = %d",ae_params->settled);
8156 // If AEC settled, or if number of frames reached bound value,
8157 // should reset instant AEC.
8158 if (ae_params->settled ||
8159 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8160 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8161 mInstantAEC = false;
8162 mResetInstantAEC = true;
8163 mInstantAecFrameIdxCount = 0;
8164 }
8165 }
8166 }
8167    resultMetadata = camMetadata.release();
8168 return resultMetadata;
8169}
8170
8171/*===========================================================================
8172 * FUNCTION : dumpMetadataToFile
8173 *
8174 * DESCRIPTION: Dumps tuning metadata to file system
8175 *
8176 * PARAMETERS :
8177 * @meta : tuning metadata
8178 * @dumpFrameCount : current dump frame count
8179 * @enabled : Enable mask
8180 *
8181 *==========================================================================*/
8182void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8183 uint32_t &dumpFrameCount,
8184 bool enabled,
8185 const char *type,
8186 uint32_t frameNumber)
8187{
8188 //Some sanity checks
8189 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8190 LOGE("Tuning sensor data size bigger than expected %d: %d",
8191 meta.tuning_sensor_data_size,
8192 TUNING_SENSOR_DATA_MAX);
8193 return;
8194 }
8195
8196 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8197 LOGE("Tuning VFE data size bigger than expected %d: %d",
8198 meta.tuning_vfe_data_size,
8199 TUNING_VFE_DATA_MAX);
8200 return;
8201 }
8202
8203 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8204 LOGE("Tuning CPP data size bigger than expected %d: %d",
8205 meta.tuning_cpp_data_size,
8206 TUNING_CPP_DATA_MAX);
8207 return;
8208 }
8209
8210 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8211 LOGE("Tuning CAC data size bigger than expected %d: %d",
8212 meta.tuning_cac_data_size,
8213 TUNING_CAC_DATA_MAX);
8214 return;
8215 }
8216 //
8217
8218 if(enabled){
8219 char timeBuf[FILENAME_MAX];
8220 char buf[FILENAME_MAX];
8221 memset(buf, 0, sizeof(buf));
8222 memset(timeBuf, 0, sizeof(timeBuf));
8223 time_t current_time;
8224 struct tm * timeinfo;
8225 time (&current_time);
8226 timeinfo = localtime (&current_time);
8227 if (timeinfo != NULL) {
8228 strftime (timeBuf, sizeof(timeBuf),
8229 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8230 }
8231 String8 filePath(timeBuf);
8232 snprintf(buf,
8233 sizeof(buf),
8234 "%dm_%s_%d.bin",
8235 dumpFrameCount,
8236 type,
8237 frameNumber);
8238 filePath.append(buf);
8239 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8240 if (file_fd >= 0) {
8241 ssize_t written_len = 0;
8242 meta.tuning_data_version = TUNING_DATA_VERSION;
8243 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8244 written_len += write(file_fd, data, sizeof(uint32_t));
8245 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8246 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8247 written_len += write(file_fd, data, sizeof(uint32_t));
8248 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8249 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8250 written_len += write(file_fd, data, sizeof(uint32_t));
8251 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8252 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8253 written_len += write(file_fd, data, sizeof(uint32_t));
8254 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8255 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8256 written_len += write(file_fd, data, sizeof(uint32_t));
8257 meta.tuning_mod3_data_size = 0;
8258 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8259 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8260 written_len += write(file_fd, data, sizeof(uint32_t));
8261 size_t total_size = meta.tuning_sensor_data_size;
8262 data = (void *)((uint8_t *)&meta.data);
8263 written_len += write(file_fd, data, total_size);
8264 total_size = meta.tuning_vfe_data_size;
8265 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8266 written_len += write(file_fd, data, total_size);
8267 total_size = meta.tuning_cpp_data_size;
8268 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8269 written_len += write(file_fd, data, total_size);
8270 total_size = meta.tuning_cac_data_size;
8271 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8272 written_len += write(file_fd, data, total_size);
8273 close(file_fd);
8274 }else {
8275            LOGE("failed to open file for metadata dumping");
8276 }
8277 }
8278}
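
/*
 * Illustrative note (not part of the original implementation comments): the dump
 * written above is a flat binary file named
 *   <QCAMERA_DUMP_FRM_LOCATION><YYYYmmddHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin
 * whose layout follows directly from the write() sequence:
 *   [tuning_data_version][sensor_size][vfe_size][cpp_size][cac_size][mod3_size = 0]  (six uint32_t)
 *   [sensor data][vfe data][cpp data][cac data]                                      (variable length)
 * A hypothetical reader would therefore parse the six uint32_t headers first and then
 * read each section using the corresponding size, e.g.:
 *   uint32_t hdr[6];
 *   read(fd, hdr, sizeof(hdr));              // version + five section sizes
 *   std::vector<uint8_t> sensor(hdr[1]);
 *   read(fd, sensor.data(), sensor.size());  // then vfe, cpp, cac in the same order
 */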
8279
8280/*===========================================================================
8281 * FUNCTION : cleanAndSortStreamInfo
8282 *
8283 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8284 *              and sort them such that the raw stream is at the end of the list.
8285 *              This is a workaround for a camera daemon constraint.
8286 *
8287 * PARAMETERS : None
8288 *
8289 *==========================================================================*/
8290void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8291{
8292 List<stream_info_t *> newStreamInfo;
8293
8294 /*clean up invalid streams*/
8295 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8296 it != mStreamInfo.end();) {
8297 if(((*it)->status) == INVALID){
8298 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8299 delete channel;
8300 free(*it);
8301 it = mStreamInfo.erase(it);
8302 } else {
8303 it++;
8304 }
8305 }
8306
8307 // Move preview/video/callback/snapshot streams into newList
8308 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8309 it != mStreamInfo.end();) {
8310 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8311 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8312 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8313 newStreamInfo.push_back(*it);
8314 it = mStreamInfo.erase(it);
8315 } else
8316 it++;
8317 }
8318 // Move raw streams into newList
8319 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8320 it != mStreamInfo.end();) {
8321 newStreamInfo.push_back(*it);
8322 it = mStreamInfo.erase(it);
8323 }
8324
8325 mStreamInfo = newStreamInfo;
8326}
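
/*
 * Illustrative example (not from the original comments): given mStreamInfo ordered as
 *   { RAW16, IMPLEMENTATION_DEFINED (preview), BLOB (snapshot) }
 * the method above first drops any INVALID entries and then rebuilds the list as
 *   { IMPLEMENTATION_DEFINED (preview), BLOB (snapshot), RAW16 }
 * so that raw streams always end up last, as the camera daemon requires.
 */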
8327
8328/*===========================================================================
8329 * FUNCTION : extractJpegMetadata
8330 *
8331 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8332 *              JPEG metadata is cached in the HAL and returned as part of the capture
8333 *              result when metadata is returned from the camera daemon.
8334 *
8335 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8336 * @request: capture request
8337 *
8338 *==========================================================================*/
8339void QCamera3HardwareInterface::extractJpegMetadata(
8340 CameraMetadata& jpegMetadata,
8341 const camera3_capture_request_t *request)
8342{
8343 CameraMetadata frame_settings;
8344 frame_settings = request->settings;
8345
8346 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8347 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8348 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8349 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8350
8351 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8352 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8353 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8354 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8355
8356 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8357 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8358 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8359 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8360
8361 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8362 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8363 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8364 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8365
8366 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8367 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8368 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8369 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8370
8371 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8372 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8373 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8374 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8375
8376 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8377 int32_t thumbnail_size[2];
8378 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8379 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8380 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8381 int32_t orientation =
8382 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8383            if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8384               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8385 int32_t temp;
8386 temp = thumbnail_size[0];
8387 thumbnail_size[0] = thumbnail_size[1];
8388 thumbnail_size[1] = temp;
8389 }
8390 }
8391 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8392 thumbnail_size,
8393 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8394 }
8395
8396}
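
/*
 * Illustrative example (assumed values, not from the original comments): for a request with
 *   ANDROID_JPEG_ORIENTATION    = 90
 *   ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240}
 * and needJpegExifRotation() returning false, the cached jpegMetadata ends up with
 * ANDROID_JPEG_THUMBNAIL_SIZE = {240, 320}, i.e. the dimensions are swapped, presumably
 * so the thumbnail size matches the rotated JPEG output.
 */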
8397
8398/*===========================================================================
8399 * FUNCTION : convertToRegions
8400 *
8401 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8402 *
8403 * PARAMETERS :
8404 * @rect : cam_rect_t struct to convert
8405 * @region : int32_t destination array
8406 * @weight : if we are converting from cam_area_t, weight is valid
8407 * else weight = -1
8408 *
8409 *==========================================================================*/
8410void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8411 int32_t *region, int weight)
8412{
8413    region[FACE_LEFT] = rect.left;
8414 region[FACE_TOP] = rect.top;
8415 region[FACE_RIGHT] = rect.left + rect.width;
8416 region[FACE_BOTTOM] = rect.top + rect.height;
8417    if (weight > -1) {
8418        region[FACE_WEIGHT] = weight;
8419    }
8420}
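
/*
 * Worked example (illustrative, assuming the FACE_* indices are laid out in the
 * order left, top, right, bottom, weight): for
 *   rect = { left = 100, top = 200, width = 300, height = 400 }, weight = 1
 * convertToRegions(rect, region, 1) fills
 *   region = { 100, 200, 400, 600, 1 }   // right = left + width, bottom = top + height
 */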
8421
8422/*===========================================================================
8423 * FUNCTION : convertFromRegions
8424 *
8425 * DESCRIPTION: helper method to convert a region tag from the capture request
8426 *              settings into cam_area_t
8427 * PARAMETERS :
8428 *   @roi            : cam_area_t destination to be filled
8429 *   @frame_settings : capture request settings
8430 *   @tag            : metadata tag holding the region as
8431 *                     [x_min, y_min, x_max, y_max, weight]
8432 *
8433 *==========================================================================*/
8434void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
8435        const CameraMetadata &frame_settings, uint32_t tag)
8436{
8437    int32_t x_min = frame_settings.find(tag).data.i32[0];
8438 int32_t y_min = frame_settings.find(tag).data.i32[1];
8439 int32_t x_max = frame_settings.find(tag).data.i32[2];
8440 int32_t y_max = frame_settings.find(tag).data.i32[3];
8441 roi.weight = frame_settings.find(tag).data.i32[4];
8442 roi.rect.left = x_min;
8443 roi.rect.top = y_min;
8444 roi.rect.width = x_max - x_min;
8445 roi.rect.height = y_max - y_min;
8446}
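
/*
 * Worked example (illustrative): if the request tag holds
 *   [x_min, y_min, x_max, y_max, weight] = [100, 200, 400, 600, 1]
 * then convertFromRegions() produces
 *   roi.rect = { left = 100, top = 200, width = 300, height = 400 }, roi.weight = 1
 * i.e. it is the inverse of convertToRegions() above.
 */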
8447
8448/*===========================================================================
8449 * FUNCTION : resetIfNeededROI
8450 *
8451 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8452 * crop region
8453 *
8454 * PARAMETERS :
8455 * @roi : cam_area_t struct to resize
8456 * @scalerCropRegion : cam_crop_region_t region to compare against
8457 *
8458 *
8459 *==========================================================================*/
8460bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8461 const cam_crop_region_t* scalerCropRegion)
8462{
8463 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8464 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8465 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8466 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8467
8468    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8469     * Without this check, the validation below (whether the ROI lies inside the
8470     * scaler crop region) would fail, leaving the ROI unreset and causing the
8471     * algorithm to keep using a stale ROI window.
8472 */
8473 if (roi->weight == 0) {
8474 return true;
8475 }
8476
8477    if ((roi_x_max < scalerCropRegion->left) ||
8478        // right edge of roi window is left of scaler crop's left edge
8479        (roi_y_max < scalerCropRegion->top) ||
8480        // bottom edge of roi window is above scaler crop's top edge
8481        (roi->rect.left > crop_x_max) ||
8482        // left edge of roi window is beyond (right of) scaler crop's right edge
8483        (roi->rect.top > crop_y_max)){
8484        // top edge of roi window is beyond (below) scaler crop's bottom edge
8485 return false;
8486 }
8487 if (roi->rect.left < scalerCropRegion->left) {
8488 roi->rect.left = scalerCropRegion->left;
8489 }
8490 if (roi->rect.top < scalerCropRegion->top) {
8491 roi->rect.top = scalerCropRegion->top;
8492 }
8493 if (roi_x_max > crop_x_max) {
8494 roi_x_max = crop_x_max;
8495 }
8496 if (roi_y_max > crop_y_max) {
8497 roi_y_max = crop_y_max;
8498 }
8499 roi->rect.width = roi_x_max - roi->rect.left;
8500 roi->rect.height = roi_y_max - roi->rect.top;
8501 return true;
8502}
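
/*
 * Worked example (illustrative): with
 *   scalerCropRegion = { left = 0, top = 0, width = 2000, height = 1500 }
 *   roi->rect        = { left = 1800, top = 1400, width = 400, height = 300 }, weight = 1
 * the ROI overlaps the crop region, so the method clamps it to
 *   roi->rect = { left = 1800, top = 1400, width = 200, height = 100 }
 * and returns true. A weight of 0 returns true without clamping, and an ROI that
 * lies entirely outside the crop region returns false.
 */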
8503
8504/*===========================================================================
8505 * FUNCTION : convertLandmarks
8506 *
8507 * DESCRIPTION: helper method to extract the landmarks from face detection info
8508 *
8509 * PARAMETERS :
8510 * @landmark_data : input landmark data to be converted
8511 * @landmarks : int32_t destination array
8512 *
8513 *
8514 *==========================================================================*/
8515void QCamera3HardwareInterface::convertLandmarks(
8516 cam_face_landmarks_info_t landmark_data,
8517 int32_t *landmarks)
8518{
8519    if (landmark_data.is_left_eye_valid) {
8520 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8521 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8522 } else {
8523 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8524 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8525 }
8526
8527 if (landmark_data.is_right_eye_valid) {
8528 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8529 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8530 } else {
8531 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8532 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8533 }
8534
8535 if (landmark_data.is_mouth_valid) {
8536 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8537 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8538 } else {
8539 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8540 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8541 }
8542}
8543
8544/*===========================================================================
8545 * FUNCTION : setInvalidLandmarks
8546 *
8547 * DESCRIPTION: helper method to set invalid landmarks
8548 *
8549 * PARAMETERS :
8550 * @landmarks : int32_t destination array
8551 *
8552 *
8553 *==========================================================================*/
8554void QCamera3HardwareInterface::setInvalidLandmarks(
8555 int32_t *landmarks)
8556{
8557 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8558 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8559 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8560 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8561 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8562 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8563}
8564
8565#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8566
8567/*===========================================================================
8568 * FUNCTION : getCapabilities
8569 *
8570 * DESCRIPTION: query camera capability from back-end
8571 *
8572 * PARAMETERS :
8573 * @ops : mm-interface ops structure
8574 * @cam_handle : camera handle for which we need capability
8575 *
8576 * RETURN : ptr type of capability structure
8577 * capability for success
8578 * NULL for failure
8579 *==========================================================================*/
8580cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8581 uint32_t cam_handle)
8582{
8583 int rc = NO_ERROR;
8584 QCamera3HeapMemory *capabilityHeap = NULL;
8585 cam_capability_t *cap_ptr = NULL;
8586
8587 if (ops == NULL) {
8588 LOGE("Invalid arguments");
8589 return NULL;
8590 }
8591
8592 capabilityHeap = new QCamera3HeapMemory(1);
8593 if (capabilityHeap == NULL) {
8594 LOGE("creation of capabilityHeap failed");
8595 return NULL;
8596 }
8597
8598 /* Allocate memory for capability buffer */
8599 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8600 if(rc != OK) {
8601        LOGE("No memory for capability");
8602 goto allocate_failed;
8603 }
8604
8605 /* Map memory for capability buffer */
8606 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8607
8608 rc = ops->map_buf(cam_handle,
8609 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8610 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8611 if(rc < 0) {
8612 LOGE("failed to map capability buffer");
8613 rc = FAILED_TRANSACTION;
8614 goto map_failed;
8615 }
8616
8617 /* Query Capability */
8618 rc = ops->query_capability(cam_handle);
8619 if(rc < 0) {
8620 LOGE("failed to query capability");
8621 rc = FAILED_TRANSACTION;
8622 goto query_failed;
8623 }
8624
8625 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8626 if (cap_ptr == NULL) {
8627 LOGE("out of memory");
8628 rc = NO_MEMORY;
8629 goto query_failed;
8630 }
8631
8632 memset(cap_ptr, 0, sizeof(cam_capability_t));
8633 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8634
8635 int index;
8636 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8637 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8638 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8639 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8640 }
8641
8642query_failed:
8643 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8644map_failed:
8645 capabilityHeap->deallocate();
8646allocate_failed:
8647 delete capabilityHeap;
8648
8649 if (rc != NO_ERROR) {
8650 return NULL;
8651 } else {
8652 return cap_ptr;
8653 }
8654}
8655
8656/*===========================================================================
8657 * FUNCTION : initCapabilities
8658 *
8659 * DESCRIPTION: initialize camera capabilities in static data struct
8660 *
8661 * PARAMETERS :
8662 * @cameraId : camera Id
8663 *
8664 * RETURN : int32_t type of status
8665 * NO_ERROR -- success
8666 * none-zero failure code
8667 *==========================================================================*/
8668int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8669{
8670 int rc = 0;
8671 mm_camera_vtbl_t *cameraHandle = NULL;
8672    uint32_t handle = 0;
8673
8674 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8675 if (rc) {
8676 LOGE("camera_open failed. rc = %d", rc);
8677 goto open_failed;
8678 }
8679 if (!cameraHandle) {
8680 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8681 goto open_failed;
8682 }
8683
8684    handle = get_main_camera_handle(cameraHandle->camera_handle);
8685 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8686 if (gCamCapability[cameraId] == NULL) {
8687 rc = FAILED_TRANSACTION;
8688 goto failed_op;
8689    }
8690
8691    gCamCapability[cameraId]->camera_index = cameraId;
8692    if (is_dual_camera_by_idx(cameraId)) {
8693 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8694 gCamCapability[cameraId]->aux_cam_cap =
8695 getCapabilities(cameraHandle->ops, handle);
8696 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8697 rc = FAILED_TRANSACTION;
8698 free(gCamCapability[cameraId]);
8699 goto failed_op;
8700 }
8701
8702 // Copy the main camera capability to main_cam_cap struct
8703 gCamCapability[cameraId]->main_cam_cap =
8704 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8705 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8706 LOGE("out of memory");
8707 rc = NO_MEMORY;
8708 goto failed_op;
8709 }
8710 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8711 sizeof(cam_capability_t));
8712    }
8713failed_op:
8714    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8715 cameraHandle = NULL;
8716open_failed:
8717 return rc;
8718}
8719
8720/*==========================================================================
8721 * FUNCTION   : get3AVersion
8722 *
8723 * DESCRIPTION: get the Q3A S/W version
8724 *
8725 * PARAMETERS :
8726 * @sw_version: Reference of Q3A structure which will hold version info upon
8727 * return
8728 *
8729 * RETURN : None
8730 *
8731 *==========================================================================*/
8732void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8733{
8734 if(gCamCapability[mCameraId])
8735 sw_version = gCamCapability[mCameraId]->q3a_version;
8736 else
8737 LOGE("Capability structure NULL!");
8738}
8739
8740
8741/*===========================================================================
8742 * FUNCTION : initParameters
8743 *
8744 * DESCRIPTION: initialize camera parameters
8745 *
8746 * PARAMETERS :
8747 *
8748 * RETURN : int32_t type of status
8749 * NO_ERROR -- success
8750 * none-zero failure code
8751 *==========================================================================*/
8752int QCamera3HardwareInterface::initParameters()
8753{
8754 int rc = 0;
8755
8756 //Allocate Set Param Buffer
8757 mParamHeap = new QCamera3HeapMemory(1);
8758 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8759 if(rc != OK) {
8760 rc = NO_MEMORY;
8761 LOGE("Failed to allocate SETPARM Heap memory");
8762 delete mParamHeap;
8763 mParamHeap = NULL;
8764 return rc;
8765 }
8766
8767 //Map memory for parameters buffer
8768 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8769 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8770 mParamHeap->getFd(0),
8771 sizeof(metadata_buffer_t),
8772 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8773 if(rc < 0) {
8774 LOGE("failed to map SETPARM buffer");
8775 rc = FAILED_TRANSACTION;
8776 mParamHeap->deallocate();
8777 delete mParamHeap;
8778 mParamHeap = NULL;
8779 return rc;
8780 }
8781
8782 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8783
8784 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8785 return rc;
8786}
8787
8788/*===========================================================================
8789 * FUNCTION : deinitParameters
8790 *
8791 * DESCRIPTION: de-initialize camera parameters
8792 *
8793 * PARAMETERS :
8794 *
8795 * RETURN : NONE
8796 *==========================================================================*/
8797void QCamera3HardwareInterface::deinitParameters()
8798{
8799 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8800 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8801
8802 mParamHeap->deallocate();
8803 delete mParamHeap;
8804 mParamHeap = NULL;
8805
8806 mParameters = NULL;
8807
8808 free(mPrevParameters);
8809 mPrevParameters = NULL;
8810}
8811
8812/*===========================================================================
8813 * FUNCTION : calcMaxJpegSize
8814 *
8815 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8816 *
8817 * PARAMETERS :
8818 *
8819 * RETURN : max_jpeg_size
8820 *==========================================================================*/
8821size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8822{
8823 size_t max_jpeg_size = 0;
8824 size_t temp_width, temp_height;
8825 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8826 MAX_SIZES_CNT);
8827 for (size_t i = 0; i < count; i++) {
8828 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8829 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8830 if (temp_width * temp_height > max_jpeg_size ) {
8831 max_jpeg_size = temp_width * temp_height;
8832 }
8833 }
8834 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8835 return max_jpeg_size;
8836}
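
/*
 * Worked example (illustrative, hypothetical sensor): if the largest entry in
 * picture_sizes_tbl is 4000x3000, the returned buffer size is
 *   4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t) = 18000000 bytes + blob trailer
 * i.e. a YUV420-sized worst case plus room for the JPEG blob header.
 */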
8837
8838/*===========================================================================
8839 * FUNCTION : getMaxRawSize
8840 *
8841 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8842 *
8843 * PARAMETERS :
8844 *
8845 * RETURN : Largest supported Raw Dimension
8846 *==========================================================================*/
8847cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8848{
8849 int max_width = 0;
8850 cam_dimension_t maxRawSize;
8851
8852 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8853 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8854 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8855 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8856 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8857 }
8858 }
8859 return maxRawSize;
8860}
8861
8862
8863/*===========================================================================
8864 * FUNCTION : calcMaxJpegDim
8865 *
8866 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8867 *
8868 * PARAMETERS :
8869 *
8870 * RETURN : max_jpeg_dim
8871 *==========================================================================*/
8872cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8873{
8874 cam_dimension_t max_jpeg_dim;
8875 cam_dimension_t curr_jpeg_dim;
8876 max_jpeg_dim.width = 0;
8877 max_jpeg_dim.height = 0;
8878 curr_jpeg_dim.width = 0;
8879 curr_jpeg_dim.height = 0;
8880 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8881 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8882 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8883 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8884 max_jpeg_dim.width * max_jpeg_dim.height ) {
8885 max_jpeg_dim.width = curr_jpeg_dim.width;
8886 max_jpeg_dim.height = curr_jpeg_dim.height;
8887 }
8888 }
8889 return max_jpeg_dim;
8890}
8891
8892/*===========================================================================
8893 * FUNCTION : addStreamConfig
8894 *
8895 * DESCRIPTION: adds the stream configuration to the array
8896 *
8897 * PARAMETERS :
8898 * @available_stream_configs : pointer to stream configuration array
8899 * @scalar_format : scalar format
8900 * @dim : configuration dimension
8901 * @config_type : input or output configuration type
8902 *
8903 * RETURN : NONE
8904 *==========================================================================*/
8905void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8906 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8907{
8908 available_stream_configs.add(scalar_format);
8909 available_stream_configs.add(dim.width);
8910 available_stream_configs.add(dim.height);
8911 available_stream_configs.add(config_type);
8912}
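
/*
 * Illustrative example (hypothetical size): each call appends one flat 4-tuple to the
 * available stream configurations, so
 *   addStreamConfig(configs, HAL_PIXEL_FORMAT_BLOB, {4032, 3024},
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 * appends { BLOB, 4032, 3024, OUTPUT }, matching the layout expected by
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
 */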
8913
8914/*===========================================================================
8915 * FUNCTION   : supportBurstCapture
8916 *
8917 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8918 *
8919 * PARAMETERS :
8920 * @cameraId : camera Id
8921 *
8922 * RETURN : true if camera supports BURST_CAPTURE
8923 * false otherwise
8924 *==========================================================================*/
8925bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8926{
8927 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8928 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8929 const int32_t highResWidth = 3264;
8930 const int32_t highResHeight = 2448;
8931
8932 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8933 // Maximum resolution images cannot be captured at >= 10fps
8934 // -> not supporting BURST_CAPTURE
8935 return false;
8936 }
8937
8938 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8939 // Maximum resolution images can be captured at >= 20fps
8940 // --> supporting BURST_CAPTURE
8941 return true;
8942 }
8943
8944 // Find the smallest highRes resolution, or largest resolution if there is none
8945 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8946 MAX_SIZES_CNT);
8947 size_t highRes = 0;
8948 while ((highRes + 1 < totalCnt) &&
8949 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8950 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8951 highResWidth * highResHeight)) {
8952 highRes++;
8953 }
8954 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8955 return true;
8956 } else {
8957 return false;
8958 }
8959}
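
/*
 * Illustrative example (hypothetical timings): if the full-resolution minimum frame
 * duration is 80ms (12.5 fps), the 100ms bound passes but the 50ms bound does not,
 * so the decision falls through to the smallest sensor size of at least 3264x2448;
 * BURST_CAPTURE is advertised only if that size can be captured within 50ms (>= 20 fps).
 */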
8960
8961/*===========================================================================
8962 * FUNCTION   : getPDStatIndex
8963 *
8964 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8965 *
8966 * PARAMETERS :
8967 * @caps : camera capabilities
8968 *
8969 * RETURN : int32_t type
8970 * non-negative - on success
8971 * -1 - on failure
8972 *==========================================================================*/
8973int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8974 if (nullptr == caps) {
8975 return -1;
8976 }
8977
8978 uint32_t metaRawCount = caps->meta_raw_channel_count;
8979 int32_t ret = -1;
8980 for (size_t i = 0; i < metaRawCount; i++) {
8981 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8982 ret = i;
8983 break;
8984 }
8985 }
8986
8987 return ret;
8988}
8989
8990/*===========================================================================
8991 * FUNCTION   : initStaticMetadata
8992 *
8993 * DESCRIPTION: initialize the static metadata
8994 *
8995 * PARAMETERS :
8996 * @cameraId : camera Id
8997 *
8998 * RETURN : int32_t type of status
8999 * 0 -- success
9000 * non-zero failure code
9001 *==========================================================================*/
9002int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9003{
9004 int rc = 0;
9005 CameraMetadata staticInfo;
9006 size_t count = 0;
9007 bool limitedDevice = false;
9008 char prop[PROPERTY_VALUE_MAX];
9009 bool supportBurst = false;
9010
9011 supportBurst = supportBurstCapture(cameraId);
9012
9013 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9014     * guaranteed or if min fps of max resolution is less than 20 fps, it is
9015     * advertised as a limited device */
9016 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9017 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9018 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9019 !supportBurst;
9020
9021 uint8_t supportedHwLvl = limitedDevice ?
9022 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
9023#ifndef USE_HAL_3_3
9024 // LEVEL_3 - This device will support level 3.
9025 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9026#else
9027            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
9028#endif
9029
9030 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9031 &supportedHwLvl, 1);
9032
9033 bool facingBack = false;
9034 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9035 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9036 facingBack = true;
9037 }
9038 /*HAL 3 only*/
9039 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9040 &gCamCapability[cameraId]->min_focus_distance, 1);
9041
9042 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9043 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9044
9045 /*should be using focal lengths but sensor doesn't provide that info now*/
9046 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9047 &gCamCapability[cameraId]->focal_length,
9048 1);
9049
9050 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9051 gCamCapability[cameraId]->apertures,
9052 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9053
9054 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9055 gCamCapability[cameraId]->filter_densities,
9056 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9057
9058
9059    uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9060 size_t mode_count =
9061 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9062 for (size_t i = 0; i < mode_count; i++) {
9063 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9064 }
9065    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9066            available_opt_stab_modes, mode_count);
9067
9068 int32_t lens_shading_map_size[] = {
9069 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9070 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9071 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9072 lens_shading_map_size,
9073 sizeof(lens_shading_map_size)/sizeof(int32_t));
9074
9075 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9076 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9077
9078 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9079 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9080
9081 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9082 &gCamCapability[cameraId]->max_frame_duration, 1);
9083
9084 camera_metadata_rational baseGainFactor = {
9085 gCamCapability[cameraId]->base_gain_factor.numerator,
9086 gCamCapability[cameraId]->base_gain_factor.denominator};
9087 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9088 &baseGainFactor, 1);
9089
9090 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9091 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9092
9093 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9094 gCamCapability[cameraId]->pixel_array_size.height};
9095 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9096 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9097
9098 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9099 gCamCapability[cameraId]->active_array_size.top,
9100 gCamCapability[cameraId]->active_array_size.width,
9101 gCamCapability[cameraId]->active_array_size.height};
9102 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9103 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9104
9105 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9106 &gCamCapability[cameraId]->white_level, 1);
9107
9108    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9109 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9110 gCamCapability[cameraId]->color_arrangement);
9111    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9112            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9113
9114#ifndef USE_HAL_3_3
9115 bool hasBlackRegions = false;
9116 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9117 LOGW("black_region_count: %d is bounded to %d",
9118 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9119 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9120 }
9121 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9122 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9123 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9124 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9125 }
9126 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9127 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9128 hasBlackRegions = true;
9129 }
9130#endif
9131    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9132 &gCamCapability[cameraId]->flash_charge_duration, 1);
9133
9134 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9135 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9136
9137    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9138 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9139 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9140    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9141 &timestampSource, 1);
9142
9143    //update histogram vendor data
9144    staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9145            &gCamCapability[cameraId]->histogram_size, 1);
9146
9147    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9148            &gCamCapability[cameraId]->max_histogram_count, 1);
9149
9150    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9151 //so that app can request fewer number of bins than the maximum supported.
9152 std::vector<int32_t> histBins;
9153 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9154 histBins.push_back(maxHistBins);
9155 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9156 (maxHistBins & 0x1) == 0) {
9157 histBins.push_back(maxHistBins >> 1);
9158 maxHistBins >>= 1;
9159 }
9160 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9161 histBins.data(), histBins.size());
9162
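    /*
     * Illustrative example (assumes MIN_CAM_HISTOGRAM_STATS_SIZE == 32, which is not
     * confirmed here): with max_histogram_count = 256 the advertised bin list would be
     * { 256, 128, 64, 32 }; the halving loop stops once the next value would drop below
     * the minimum or the current value becomes odd.
     */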
9163    int32_t sharpness_map_size[] = {
9164 gCamCapability[cameraId]->sharpness_map_size.width,
9165 gCamCapability[cameraId]->sharpness_map_size.height};
9166
9167 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9168 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9169
9170 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9171 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9172
9173    int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9174 if (0 <= indexPD) {
9175 // Advertise PD stats data as part of the Depth capabilities
9176 int32_t depthWidth =
9177 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9178 int32_t depthHeight =
9179 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9180 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9181 assert(0 < depthSamplesCount);
9182 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9183 &depthSamplesCount, 1);
9184
9185 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9186 depthHeight,
9187 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9188 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9189 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9190 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9191 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9192
9193 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9194 depthHeight, 33333333,
9195 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9196 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9197 depthMinDuration,
9198 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9199
9200 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9201 depthHeight, 0,
9202 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9203 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9204 depthStallDuration,
9205 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9206
9207 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9208 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9209 }
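    // Note on depthSamplesCount above: the PD RAW16 buffer carries 2 bytes per pixel and each
    // depth sample is assumed to span 16 bytes, hence (width * height * 2) / 16. For a
    // hypothetical 504x378 PD dimension this yields 23814 samples.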
9210
Thierry Strudel3d639192016-09-09 11:52:26 -07009211 int32_t scalar_formats[] = {
9212 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9213 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9214 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9215 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9216 HAL_PIXEL_FORMAT_RAW10,
9217 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009218 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9219 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9220 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009221
9222 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9223 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9224 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9225 count, MAX_SIZES_CNT, available_processed_sizes);
9226 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9227 available_processed_sizes, count * 2);
9228
9229 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9230 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9231 makeTable(gCamCapability[cameraId]->raw_dim,
9232 count, MAX_SIZES_CNT, available_raw_sizes);
9233 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9234 available_raw_sizes, count * 2);
9235
9236 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9237 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9238 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9239 count, MAX_SIZES_CNT, available_fps_ranges);
9240 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9241 available_fps_ranges, count * 2);
9242
9243 camera_metadata_rational exposureCompensationStep = {
9244 gCamCapability[cameraId]->exp_compensation_step.numerator,
9245 gCamCapability[cameraId]->exp_compensation_step.denominator};
9246 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9247 &exposureCompensationStep, 1);
9248
9249 Vector<uint8_t> availableVstabModes;
9250 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9251 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009252 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009253 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009254 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009255 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009256 count = IS_TYPE_MAX;
9257 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9258 for (size_t i = 0; i < count; i++) {
9259 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9260 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9261 eisSupported = true;
9262 break;
9263 }
9264 }
9265 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009266 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9267 }
9268 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9269 availableVstabModes.array(), availableVstabModes.size());
9270
9271 /*HAL 1 and HAL 3 common*/
9272 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9273 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9274 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009275 // Cap the max zoom to the max preferred value
9276 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009277 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9278 &maxZoom, 1);
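    // Zoom ratio table entries are in HAL1-style 1/100x units (hence minZoomStep == 100), so a
    // hypothetical last entry of 800 would give an 8x maximum before the MAX_PREFERRED_ZOOM_RATIO
    // cap. Note that maxZoomStep/minZoomStep is an unsigned integer division before the float cap.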
9279
9280 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9281 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9282
9283 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9284 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9285 max3aRegions[2] = 0; /* AF not supported */
9286 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9287 max3aRegions, 3);
9288
9289 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9290 memset(prop, 0, sizeof(prop));
9291 property_get("persist.camera.facedetect", prop, "1");
9292 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
 9293 LOGD("Supported face detection mode: %d",
9294 supportedFaceDetectMode);
9295
9296 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009297 /* supported face detect mode should be OFF if the max number of faces is 0 */
9298 if (maxFaces <= 0) {
9299 supportedFaceDetectMode = 0;
9300 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009301 Vector<uint8_t> availableFaceDetectModes;
9302 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9303 if (supportedFaceDetectMode == 1) {
9304 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9305 } else if (supportedFaceDetectMode == 2) {
9306 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9307 } else if (supportedFaceDetectMode == 3) {
9308 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9309 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9310 } else {
9311 maxFaces = 0;
9312 }
9313 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9314 availableFaceDetectModes.array(),
9315 availableFaceDetectModes.size());
9316 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9317 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009318 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9319 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9320 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009321
9322 int32_t exposureCompensationRange[] = {
9323 gCamCapability[cameraId]->exposure_compensation_min,
9324 gCamCapability[cameraId]->exposure_compensation_max};
9325 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9326 exposureCompensationRange,
9327 sizeof(exposureCompensationRange)/sizeof(int32_t));
9328
9329 uint8_t lensFacing = (facingBack) ?
9330 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9331 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9332
9333 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9334 available_thumbnail_sizes,
9335 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9336
 9337 /* all sizes will be combined into this tag */
9338 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9339 /*android.scaler.availableStreamConfigurations*/
9340 Vector<int32_t> available_stream_configs;
9341 cam_dimension_t active_array_dim;
9342 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9343 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009344
 9345 /* Advertise the list of supported input dimensions based on the property below.
 9346 By default, all sizes up to 5MP will be advertised.
 9347 Note that the setprop resolution format should be WxH,
 9348 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
 9349 To list all supported sizes, set the property to "0x0". */
9350 cam_dimension_t minInputSize = {2592,1944}; //5MP
9351 memset(prop, 0, sizeof(prop));
9352 property_get("persist.camera.input.minsize", prop, "2592x1944");
9353 if (strlen(prop) > 0) {
9354 char *saveptr = NULL;
9355 char *token = strtok_r(prop, "x", &saveptr);
9356 if (token != NULL) {
9357 minInputSize.width = atoi(token);
9358 }
9359 token = strtok_r(NULL, "x", &saveptr);
9360 if (token != NULL) {
9361 minInputSize.height = atoi(token);
9362 }
9363 }
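    // Illustrative effect (see the input-stream check further below): with the default
    // "2592x1944", an input configuration is advertised only when the first (typically largest)
    // picture size is at least 2592 wide or 1944 tall; "0x0" removes that restriction.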
9364
Thierry Strudel3d639192016-09-09 11:52:26 -07009365 /* Add input/output stream configurations for each scalar formats*/
9366 for (size_t j = 0; j < scalar_formats_count; j++) {
9367 switch (scalar_formats[j]) {
9368 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9369 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9370 case HAL_PIXEL_FORMAT_RAW10:
9371 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9372 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9373 addStreamConfig(available_stream_configs, scalar_formats[j],
9374 gCamCapability[cameraId]->raw_dim[i],
9375 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9376 }
9377 break;
9378 case HAL_PIXEL_FORMAT_BLOB:
9379 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9380 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9381 addStreamConfig(available_stream_configs, scalar_formats[j],
9382 gCamCapability[cameraId]->picture_sizes_tbl[i],
9383 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9384 }
9385 break;
9386 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9387 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9388 default:
9389 cam_dimension_t largest_picture_size;
9390 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9391 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9392 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9393 addStreamConfig(available_stream_configs, scalar_formats[j],
9394 gCamCapability[cameraId]->picture_sizes_tbl[i],
9395 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009396 /* For the two formats below we also support input streams for reprocessing; advertise those. */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009397 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9398 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009399 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9400 >= minInputSize.width) || (gCamCapability[cameraId]->
9401 picture_sizes_tbl[i].height >= minInputSize.height)) {
9402 addStreamConfig(available_stream_configs, scalar_formats[j],
9403 gCamCapability[cameraId]->picture_sizes_tbl[i],
9404 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9405 }
9406 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009407 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009408
Thierry Strudel3d639192016-09-09 11:52:26 -07009409 break;
9410 }
9411 }
9412
9413 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9414 available_stream_configs.array(), available_stream_configs.size());
9415 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9416 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9417
9418 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9419 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9420
9421 /* android.scaler.availableMinFrameDurations */
9422 Vector<int64_t> available_min_durations;
9423 for (size_t j = 0; j < scalar_formats_count; j++) {
9424 switch (scalar_formats[j]) {
9425 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9426 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9427 case HAL_PIXEL_FORMAT_RAW10:
9428 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9429 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9430 available_min_durations.add(scalar_formats[j]);
9431 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9432 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9433 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9434 }
9435 break;
9436 default:
9437 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9438 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9439 available_min_durations.add(scalar_formats[j]);
9440 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9441 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9442 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9443 }
9444 break;
9445 }
9446 }
9447 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9448 available_min_durations.array(), available_min_durations.size());
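    // The vector above is flattened as (format, width, height, min_frame_duration) quadruples;
    // the durations are in nanoseconds as defined by ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS.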
9449
9450 Vector<int32_t> available_hfr_configs;
9451 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9452 int32_t fps = 0;
9453 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9454 case CAM_HFR_MODE_60FPS:
9455 fps = 60;
9456 break;
9457 case CAM_HFR_MODE_90FPS:
9458 fps = 90;
9459 break;
9460 case CAM_HFR_MODE_120FPS:
9461 fps = 120;
9462 break;
9463 case CAM_HFR_MODE_150FPS:
9464 fps = 150;
9465 break;
9466 case CAM_HFR_MODE_180FPS:
9467 fps = 180;
9468 break;
9469 case CAM_HFR_MODE_210FPS:
9470 fps = 210;
9471 break;
9472 case CAM_HFR_MODE_240FPS:
9473 fps = 240;
9474 break;
9475 case CAM_HFR_MODE_480FPS:
9476 fps = 480;
9477 break;
9478 case CAM_HFR_MODE_OFF:
9479 case CAM_HFR_MODE_MAX:
9480 default:
9481 break;
9482 }
9483
9484 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9485 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
 9486 /* For each HFR frame rate, we need to advertise one variable fps range
 9487 * and one fixed fps range per dimension. E.g., for 120 FPS, advertise [30, 120]
 9488 * and [120, 120]. While camcorder preview alone is running, [30, 120] is
 9489 * set by the app. When video recording is started, [120, 120] is
 9490 * set. This way the sensor configuration does not change when recording
 9491 * is started. */
9492
9493 /* (width, height, fps_min, fps_max, batch_size_max) */
9494 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9495 j < MAX_SIZES_CNT; j++) {
9496 available_hfr_configs.add(
9497 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9498 available_hfr_configs.add(
9499 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9500 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9501 available_hfr_configs.add(fps);
9502 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9503
9504 /* (width, height, fps_min, fps_max, batch_size_max) */
9505 available_hfr_configs.add(
9506 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9507 available_hfr_configs.add(
9508 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9509 available_hfr_configs.add(fps);
9510 available_hfr_configs.add(fps);
9511 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9512 }
9513 }
9514 }
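    // Illustrative layout, assuming PREVIEW_FPS_FOR_HFR is 30 as in the example above: a
    // 1920x1080 entry at 120 fps would contribute two (width, height, fps_min, fps_max,
    // batch_size_max) tuples, (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4).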
9515 //Advertise HFR capability only if the property is set
9516 memset(prop, 0, sizeof(prop));
9517 property_get("persist.camera.hal3hfr.enable", prop, "1");
9518 uint8_t hfrEnable = (uint8_t)atoi(prop);
9519
9520 if(hfrEnable && available_hfr_configs.array()) {
9521 staticInfo.update(
9522 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9523 available_hfr_configs.array(), available_hfr_configs.size());
9524 }
9525
9526 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9527 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9528 &max_jpeg_size, 1);
9529
9530 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9531 size_t size = 0;
9532 count = CAM_EFFECT_MODE_MAX;
9533 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9534 for (size_t i = 0; i < count; i++) {
9535 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9536 gCamCapability[cameraId]->supported_effects[i]);
9537 if (NAME_NOT_FOUND != val) {
9538 avail_effects[size] = (uint8_t)val;
9539 size++;
9540 }
9541 }
9542 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9543 avail_effects,
9544 size);
9545
9546 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9547 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9548 size_t supported_scene_modes_cnt = 0;
9549 count = CAM_SCENE_MODE_MAX;
9550 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9551 for (size_t i = 0; i < count; i++) {
9552 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9553 CAM_SCENE_MODE_OFF) {
9554 int val = lookupFwkName(SCENE_MODES_MAP,
9555 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9556 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009557
Thierry Strudel3d639192016-09-09 11:52:26 -07009558 if (NAME_NOT_FOUND != val) {
9559 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9560 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9561 supported_scene_modes_cnt++;
9562 }
9563 }
9564 }
9565 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9566 avail_scene_modes,
9567 supported_scene_modes_cnt);
9568
9569 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9570 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9571 supported_scene_modes_cnt,
9572 CAM_SCENE_MODE_MAX,
9573 scene_mode_overrides,
9574 supported_indexes,
9575 cameraId);
9576
9577 if (supported_scene_modes_cnt == 0) {
9578 supported_scene_modes_cnt = 1;
9579 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9580 }
9581
9582 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9583 scene_mode_overrides, supported_scene_modes_cnt * 3);
9584
9585 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9586 ANDROID_CONTROL_MODE_AUTO,
9587 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9588 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9589 available_control_modes,
9590 3);
9591
9592 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9593 size = 0;
9594 count = CAM_ANTIBANDING_MODE_MAX;
9595 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9596 for (size_t i = 0; i < count; i++) {
9597 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9598 gCamCapability[cameraId]->supported_antibandings[i]);
9599 if (NAME_NOT_FOUND != val) {
9600 avail_antibanding_modes[size] = (uint8_t)val;
9601 size++;
9602 }
9603
9604 }
9605 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9606 avail_antibanding_modes,
9607 size);
9608
9609 uint8_t avail_abberation_modes[] = {
9610 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9611 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9612 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9613 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9614 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9615 if (0 == count) {
 9616 // If no aberration correction modes are available for a device, advertise only the OFF mode
9617 size = 1;
9618 } else {
 9619 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
 9620 // So, advertise all 3 modes if at least one mode is supported, as per the
 9621 // new M requirement.
9622 size = 3;
9623 }
9624 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9625 avail_abberation_modes,
9626 size);
9627
9628 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9629 size = 0;
9630 count = CAM_FOCUS_MODE_MAX;
9631 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9632 for (size_t i = 0; i < count; i++) {
9633 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9634 gCamCapability[cameraId]->supported_focus_modes[i]);
9635 if (NAME_NOT_FOUND != val) {
9636 avail_af_modes[size] = (uint8_t)val;
9637 size++;
9638 }
9639 }
9640 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9641 avail_af_modes,
9642 size);
9643
9644 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9645 size = 0;
9646 count = CAM_WB_MODE_MAX;
9647 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9648 for (size_t i = 0; i < count; i++) {
9649 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9650 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9651 gCamCapability[cameraId]->supported_white_balances[i]);
9652 if (NAME_NOT_FOUND != val) {
9653 avail_awb_modes[size] = (uint8_t)val;
9654 size++;
9655 }
9656 }
9657 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9658 avail_awb_modes,
9659 size);
9660
9661 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9662 count = CAM_FLASH_FIRING_LEVEL_MAX;
9663 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9664 count);
9665 for (size_t i = 0; i < count; i++) {
9666 available_flash_levels[i] =
9667 gCamCapability[cameraId]->supported_firing_levels[i];
9668 }
9669 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9670 available_flash_levels, count);
9671
9672 uint8_t flashAvailable;
9673 if (gCamCapability[cameraId]->flash_available)
9674 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9675 else
9676 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9677 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9678 &flashAvailable, 1);
9679
9680 Vector<uint8_t> avail_ae_modes;
9681 count = CAM_AE_MODE_MAX;
9682 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9683 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009684 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9685 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9686 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9687 }
9688 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009689 }
9690 if (flashAvailable) {
9691 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9692 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9693 }
9694 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9695 avail_ae_modes.array(),
9696 avail_ae_modes.size());
9697
9698 int32_t sensitivity_range[2];
9699 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9700 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9701 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9702 sensitivity_range,
9703 sizeof(sensitivity_range) / sizeof(int32_t));
9704
9705 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9706 &gCamCapability[cameraId]->max_analog_sensitivity,
9707 1);
9708
9709 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9710 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9711 &sensor_orientation,
9712 1);
9713
9714 int32_t max_output_streams[] = {
9715 MAX_STALLING_STREAMS,
9716 MAX_PROCESSED_STREAMS,
9717 MAX_RAW_STREAMS};
9718 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9719 max_output_streams,
9720 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9721
9722 uint8_t avail_leds = 0;
9723 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9724 &avail_leds, 0);
9725
9726 uint8_t focus_dist_calibrated;
9727 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9728 gCamCapability[cameraId]->focus_dist_calibrated);
9729 if (NAME_NOT_FOUND != val) {
9730 focus_dist_calibrated = (uint8_t)val;
9731 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9732 &focus_dist_calibrated, 1);
9733 }
9734
9735 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9736 size = 0;
9737 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9738 MAX_TEST_PATTERN_CNT);
9739 for (size_t i = 0; i < count; i++) {
9740 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9741 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9742 if (NAME_NOT_FOUND != testpatternMode) {
9743 avail_testpattern_modes[size] = testpatternMode;
9744 size++;
9745 }
9746 }
9747 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9748 avail_testpattern_modes,
9749 size);
9750
9751 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9752 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9753 &max_pipeline_depth,
9754 1);
9755
9756 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9757 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9758 &partial_result_count,
9759 1);
9760
9761 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9762 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9763
9764 Vector<uint8_t> available_capabilities;
9765 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9766 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9767 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9768 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9769 if (supportBurst) {
9770 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9771 }
9772 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9773 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9774 if (hfrEnable && available_hfr_configs.array()) {
9775 available_capabilities.add(
9776 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9777 }
9778
9779 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9780 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9781 }
9782 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9783 available_capabilities.array(),
9784 available_capabilities.size());
9785
 9786 // aeLockAvailable is to be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
 9787 // The assumption is that all Bayer cameras support MANUAL_SENSOR.
9788 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9789 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9790
9791 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9792 &aeLockAvailable, 1);
9793
 9794 // awbLockAvailable is to be set to true if the capabilities include MANUAL_POST_PROCESSING or
 9795 // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9796 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9797 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9798
9799 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9800 &awbLockAvailable, 1);
9801
9802 int32_t max_input_streams = 1;
9803 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9804 &max_input_streams,
9805 1);
9806
9807 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9808 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9809 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9810 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9811 HAL_PIXEL_FORMAT_YCbCr_420_888};
9812 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9813 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
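    // Decoded, the map above advertises that both IMPLEMENTATION_DEFINED and YCbCr_420_888
    // input streams can each be reprocessed into two output formats: BLOB (JPEG) and
    // YCbCr_420_888.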
9814
9815 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9816 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9817 &max_latency,
9818 1);
9819
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009820#ifndef USE_HAL_3_3
9821 int32_t isp_sensitivity_range[2];
9822 isp_sensitivity_range[0] =
9823 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9824 isp_sensitivity_range[1] =
9825 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9826 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9827 isp_sensitivity_range,
9828 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9829#endif
9830
Thierry Strudel3d639192016-09-09 11:52:26 -07009831 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9832 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9833 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9834 available_hot_pixel_modes,
9835 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9836
9837 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9838 ANDROID_SHADING_MODE_FAST,
9839 ANDROID_SHADING_MODE_HIGH_QUALITY};
9840 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9841 available_shading_modes,
9842 3);
9843
9844 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9845 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9846 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9847 available_lens_shading_map_modes,
9848 2);
9849
9850 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9851 ANDROID_EDGE_MODE_FAST,
9852 ANDROID_EDGE_MODE_HIGH_QUALITY,
9853 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9854 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9855 available_edge_modes,
9856 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9857
9858 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9859 ANDROID_NOISE_REDUCTION_MODE_FAST,
9860 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9861 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9862 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9863 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9864 available_noise_red_modes,
9865 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9866
9867 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9868 ANDROID_TONEMAP_MODE_FAST,
9869 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9870 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9871 available_tonemap_modes,
9872 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9873
9874 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9875 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9876 available_hot_pixel_map_modes,
9877 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9878
9879 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9880 gCamCapability[cameraId]->reference_illuminant1);
9881 if (NAME_NOT_FOUND != val) {
9882 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9883 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9884 }
9885
9886 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9887 gCamCapability[cameraId]->reference_illuminant2);
9888 if (NAME_NOT_FOUND != val) {
9889 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9890 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9891 }
9892
9893 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9894 (void *)gCamCapability[cameraId]->forward_matrix1,
9895 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9896
9897 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9898 (void *)gCamCapability[cameraId]->forward_matrix2,
9899 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9900
9901 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9902 (void *)gCamCapability[cameraId]->color_transform1,
9903 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9904
9905 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9906 (void *)gCamCapability[cameraId]->color_transform2,
9907 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9908
9909 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9910 (void *)gCamCapability[cameraId]->calibration_transform1,
9911 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9912
9913 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9914 (void *)gCamCapability[cameraId]->calibration_transform2,
9915 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9916
9917 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9918 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9919 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9920 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9921 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9922 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9923 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9924 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9925 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9926 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9927 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9928 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9929 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9930 ANDROID_JPEG_GPS_COORDINATES,
9931 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9932 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9933 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9934 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9935 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9936 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9937 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9938 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9939 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9940 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009941#ifndef USE_HAL_3_3
9942 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9943#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009944 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009945 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009946 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9947 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009948 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009949 /* DevCamDebug metadata request_keys_basic */
9950 DEVCAMDEBUG_META_ENABLE,
9951 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009952 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009953 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009954 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009955 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009956 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009957
9958 size_t request_keys_cnt =
9959 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9960 Vector<int32_t> available_request_keys;
9961 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9962 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9963 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9964 }
9965
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009966 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00009967 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009968 }
9969
Thierry Strudel3d639192016-09-09 11:52:26 -07009970 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9971 available_request_keys.array(), available_request_keys.size());
9972
9973 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9974 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9975 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9976 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9977 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9978 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9979 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9980 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9981 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9982 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9983 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9984 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9985 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9986 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9987 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9988 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9989 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009990 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009991 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9992 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9993 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009994 ANDROID_STATISTICS_FACE_SCORES,
9995#ifndef USE_HAL_3_3
9996 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9997#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009998 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009999 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010000 // DevCamDebug metadata result_keys_basic
10001 DEVCAMDEBUG_META_ENABLE,
10002 // DevCamDebug metadata result_keys AF
10003 DEVCAMDEBUG_AF_LENS_POSITION,
10004 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10005 DEVCAMDEBUG_AF_TOF_DISTANCE,
10006 DEVCAMDEBUG_AF_LUMA,
10007 DEVCAMDEBUG_AF_HAF_STATE,
10008 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10009 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10010 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10011 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10012 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10013 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10014 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10015 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10016 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10017 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10018 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10019 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10020 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10021 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10022 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10023 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10024 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10025 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10026 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10027 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10028 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10029 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10030 // DevCamDebug metadata result_keys AEC
10031 DEVCAMDEBUG_AEC_TARGET_LUMA,
10032 DEVCAMDEBUG_AEC_COMP_LUMA,
10033 DEVCAMDEBUG_AEC_AVG_LUMA,
10034 DEVCAMDEBUG_AEC_CUR_LUMA,
10035 DEVCAMDEBUG_AEC_LINECOUNT,
10036 DEVCAMDEBUG_AEC_REAL_GAIN,
10037 DEVCAMDEBUG_AEC_EXP_INDEX,
10038 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010039 // DevCamDebug metadata result_keys zzHDR
10040 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10041 DEVCAMDEBUG_AEC_L_LINECOUNT,
10042 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10043 DEVCAMDEBUG_AEC_S_LINECOUNT,
10044 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10045 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10046 // DevCamDebug metadata result_keys ADRC
10047 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10048 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10049 DEVCAMDEBUG_AEC_GTM_RATIO,
10050 DEVCAMDEBUG_AEC_LTM_RATIO,
10051 DEVCAMDEBUG_AEC_LA_RATIO,
10052 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010053 // DevCamDebug metadata result_keys AWB
10054 DEVCAMDEBUG_AWB_R_GAIN,
10055 DEVCAMDEBUG_AWB_G_GAIN,
10056 DEVCAMDEBUG_AWB_B_GAIN,
10057 DEVCAMDEBUG_AWB_CCT,
10058 DEVCAMDEBUG_AWB_DECISION,
10059 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010060 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10061 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10062 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010063 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010064 };
10065
Thierry Strudel3d639192016-09-09 11:52:26 -070010066 size_t result_keys_cnt =
10067 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10068
10069 Vector<int32_t> available_result_keys;
10070 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10071 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10072 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10073 }
10074 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10075 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10076 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10077 }
10078 if (supportedFaceDetectMode == 1) {
10079 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10080 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10081 } else if ((supportedFaceDetectMode == 2) ||
10082 (supportedFaceDetectMode == 3)) {
10083 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10084 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10085 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010086#ifndef USE_HAL_3_3
10087 if (hasBlackRegions) {
10088 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10089 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10090 }
10091#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010092
10093 if (gExposeEnableZslKey) {
10094 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10095 }
10096
Thierry Strudel3d639192016-09-09 11:52:26 -070010097 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10098 available_result_keys.array(), available_result_keys.size());
10099
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010100 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010101 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10102 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10103 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10104 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10105 ANDROID_SCALER_CROPPING_TYPE,
10106 ANDROID_SYNC_MAX_LATENCY,
10107 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10108 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10109 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10110 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10111 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10112 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10113 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10114 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10115 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10116 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10117 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10118 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10119 ANDROID_LENS_FACING,
10120 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10121 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10122 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10123 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10124 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10125 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10126 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10127 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10128 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10129 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10130 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10131 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10132 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10133 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10134 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10135 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10136 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10137 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10138 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10139 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010140 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010141 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10142 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10143 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10144 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10145 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10146 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10147 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10148 ANDROID_CONTROL_AVAILABLE_MODES,
10149 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10150 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10151 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10152 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010153 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10154#ifndef USE_HAL_3_3
10155 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10156 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10157#endif
10158 };
10159
10160 Vector<int32_t> available_characteristics_keys;
10161 available_characteristics_keys.appendArray(characteristics_keys_basic,
10162 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10163#ifndef USE_HAL_3_3
10164 if (hasBlackRegions) {
10165 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10166 }
10167#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010168
10169 if (0 <= indexPD) {
10170 int32_t depthKeys[] = {
10171 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10172 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10173 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10174 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10175 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10176 };
10177 available_characteristics_keys.appendArray(depthKeys,
10178 sizeof(depthKeys) / sizeof(depthKeys[0]));
10179 }
10180
Thierry Strudel3d639192016-09-09 11:52:26 -070010181 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010182 available_characteristics_keys.array(),
10183 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010184
 10185 /* Available stall durations depend on the HW + SW and will differ across devices. */
 10186 /* Stall durations for RAW have to be added after implementation. */
10187 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10188 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10189
10190 Vector<int64_t> available_stall_durations;
10191 for (uint32_t j = 0; j < stall_formats_count; j++) {
10192 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10193 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10194 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10195 available_stall_durations.add(stall_formats[j]);
10196 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10197 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10198 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10199 }
10200 } else {
10201 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10202 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10203 available_stall_durations.add(stall_formats[j]);
10204 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10205 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10206 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10207 }
10208 }
10209 }
10210 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10211 available_stall_durations.array(),
10212 available_stall_durations.size());
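    // The stall-duration vector is flattened as (format, width, height, stall_duration)
    // quadruples, one per BLOB picture size and one per RAW16 dimension, with durations in
    // nanoseconds as defined for ANDROID_SCALER_AVAILABLE_STALL_DURATIONS.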
10213
10214 //QCAMERA3_OPAQUE_RAW
10215 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10216 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10217 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10218 case LEGACY_RAW:
10219 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10220 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10221 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10222 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10223 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10224 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10225 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10226 break;
10227 case MIPI_RAW:
10228 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10229 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10230 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10231 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10232 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10233 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10234 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10235 break;
10236 default:
10237 LOGE("unknown opaque_raw_format %d",
10238 gCamCapability[cameraId]->opaque_raw_fmt);
10239 break;
10240 }
10241 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10242
10243 Vector<int32_t> strides;
10244 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10245 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10246 cam_stream_buf_plane_info_t buf_planes;
10247 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10248 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10249 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10250 &gCamCapability[cameraId]->padding_info, &buf_planes);
10251 strides.add(buf_planes.plane_info.mp[0].stride);
10252 }
10253 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10254 strides.size());
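    // QCAMERA3_OPAQUE_RAW_STRIDES is flattened as (width, height, stride) triplets, with the
    // stride taken from plane 0 of the offset calculation above.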
10255
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010256 //TBD: remove the following line once backend advertises zzHDR in feature mask
10257 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010258 //Video HDR default
10259 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10260 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010261 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010262 int32_t vhdr_mode[] = {
10263 QCAMERA3_VIDEO_HDR_MODE_OFF,
10264 QCAMERA3_VIDEO_HDR_MODE_ON};
10265
10266 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10267 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10268 vhdr_mode, vhdr_mode_count);
10269 }
10270
Thierry Strudel3d639192016-09-09 11:52:26 -070010271 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10272 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10273 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10274
10275 uint8_t isMonoOnly =
10276 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10277 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10278 &isMonoOnly, 1);
10279
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010280#ifndef USE_HAL_3_3
10281 Vector<int32_t> opaque_size;
10282 for (size_t j = 0; j < scalar_formats_count; j++) {
10283 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10284 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10285 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10286 cam_stream_buf_plane_info_t buf_planes;
10287
10288 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10289 &gCamCapability[cameraId]->padding_info, &buf_planes);
10290
10291 if (rc == 0) {
10292 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10293 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10294 opaque_size.add(buf_planes.plane_info.frame_len);
 10295 } else {
10296 LOGE("raw frame calculation failed!");
10297 }
10298 }
10299 }
10300 }
10301
10302 if ((opaque_size.size() > 0) &&
10303 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10304 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10305 else
 10306 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using a rough estimation (2 bytes/pixel)");
10307#endif
10308
Thierry Strudel04e026f2016-10-10 11:27:36 -070010309 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10310 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10311 size = 0;
10312 count = CAM_IR_MODE_MAX;
10313 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10314 for (size_t i = 0; i < count; i++) {
10315 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10316 gCamCapability[cameraId]->supported_ir_modes[i]);
10317 if (NAME_NOT_FOUND != val) {
10318 avail_ir_modes[size] = (int32_t)val;
10319 size++;
10320 }
10321 }
10322 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10323 avail_ir_modes, size);
10324 }
10325
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010326 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10327 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10328 size = 0;
10329 count = CAM_AEC_CONVERGENCE_MAX;
10330 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10331 for (size_t i = 0; i < count; i++) {
10332 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10333 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10334 if (NAME_NOT_FOUND != val) {
10335 available_instant_aec_modes[size] = (int32_t)val;
10336 size++;
10337 }
10338 }
10339 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10340 available_instant_aec_modes, size);
10341 }
10342
Thierry Strudel54dc9782017-02-15 12:12:10 -080010343 int32_t sharpness_range[] = {
10344 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10345 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10346 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10347
10348 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10349 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10350 size = 0;
10351 count = CAM_BINNING_CORRECTION_MODE_MAX;
10352 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10353 for (size_t i = 0; i < count; i++) {
10354 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10355 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10356 gCamCapability[cameraId]->supported_binning_modes[i]);
10357 if (NAME_NOT_FOUND != val) {
10358 avail_binning_modes[size] = (int32_t)val;
10359 size++;
10360 }
10361 }
10362 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10363 avail_binning_modes, size);
10364 }
10365
10366 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10367 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10368 size = 0;
10369 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10370 for (size_t i = 0; i < count; i++) {
10371 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10372 gCamCapability[cameraId]->supported_aec_modes[i]);
10373 if (NAME_NOT_FOUND != val)
10374 available_aec_modes[size++] = val;
10375 }
10376 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10377 available_aec_modes, size);
10378 }
10379
10380 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10381 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10382 size = 0;
10383 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10384 for (size_t i = 0; i < count; i++) {
10385 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10386 gCamCapability[cameraId]->supported_iso_modes[i]);
10387 if (NAME_NOT_FOUND != val)
10388 available_iso_modes[size++] = val;
10389 }
10390 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10391 available_iso_modes, size);
10392 }
10393
10394 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010395 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010396 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10397 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10398 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10399
10400 int32_t available_saturation_range[4];
10401 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10402 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10403 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10404 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10405 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10406 available_saturation_range, 4);
10407
10408 uint8_t is_hdr_values[2];
10409 is_hdr_values[0] = 0;
10410 is_hdr_values[1] = 1;
10411 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10412 is_hdr_values, 2);
10413
10414 float is_hdr_confidence_range[2];
10415 is_hdr_confidence_range[0] = 0.0;
10416 is_hdr_confidence_range[1] = 1.0;
10417 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10418 is_hdr_confidence_range, 2);
10419
Emilian Peev0a972ef2017-03-16 10:25:53 +000010420 size_t eepromLength = strnlen(
10421 reinterpret_cast<const char *>(
10422 gCamCapability[cameraId]->eeprom_version_info),
10423 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10424 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010425 char easelInfo[] = ",E:N";
10426 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10427 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10428 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010429 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10430 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010431 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010432 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10433 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10434 }
10435
Thierry Strudel3d639192016-09-09 11:52:26 -070010436 gStaticMetadata[cameraId] = staticInfo.release();
10437 return rc;
10438}
10439
10440/*===========================================================================
10441 * FUNCTION : makeTable
10442 *
10443 * DESCRIPTION: make a table of sizes
10444 *
10445 * PARAMETERS : @dimTable : source table of cam_dimension_t entries
10446 *              @size / @max_size : valid entry count and clamp limit
10447 *              @sizeTable : output array of packed {width, height} int32 pairs
10448 *==========================================================================*/
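// Example (illustrative): dimTable = {{640, 480}, {1280, 720}} is flattened into
// sizeTable = {640, 480, 1280, 720}, the packed int32 {width, height} layout used
// by the available-size static metadata tags.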
10449void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10450 size_t max_size, int32_t *sizeTable)
10451{
10452 size_t j = 0;
10453 if (size > max_size) {
10454 size = max_size;
10455 }
10456 for (size_t i = 0; i < size; i++) {
10457 sizeTable[j] = dimTable[i].width;
10458 sizeTable[j+1] = dimTable[i].height;
10459 j+=2;
10460 }
10461}
10462
10463/*===========================================================================
10464 * FUNCTION : makeFPSTable
10465 *
10466 * DESCRIPTION: make a table of fps ranges
10467 *
10468 * PARAMETERS : @fpsTable / @size / @max_size : source fps ranges, count, clamp limit
10469 *              @fpsRangesTable : output array of packed {min_fps, max_fps} int32 pairs
10470 *==========================================================================*/
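// Example (illustrative): fpsTable = {{15, 30}, {30, 30}} becomes
// fpsRangesTable = {15, 30, 30, 30}, the packed {min, max} int32 layout used by
// ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES.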
10471void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10472 size_t max_size, int32_t *fpsRangesTable)
10473{
10474 size_t j = 0;
10475 if (size > max_size) {
10476 size = max_size;
10477 }
10478 for (size_t i = 0; i < size; i++) {
10479 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10480 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10481 j+=2;
10482 }
10483}
10484
10485/*===========================================================================
10486 * FUNCTION : makeOverridesList
10487 *
10488 * DESCRIPTION: make a list of scene mode overrides
10489 *
10490 * PARAMETERS : @overridesTable / @size / @max_size : backend overrides, count, clamp limit
10491 *              @supported_indexes : scene mode indexes supported by the framework
10492 *              @overridesList : output (ae, awb, af) triplets; @camera_id : camera to query
10493 *==========================================================================*/
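// Note: the output is one (aeMode, awbMode, afMode) triplet per supported scene
// mode, matching the three-entries-per-mode layout expected by
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES.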
10494void QCamera3HardwareInterface::makeOverridesList(
10495 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10496 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10497{
10498 /* The daemon provides a list of overrides for all scene modes.
10499 However, we should send the framework only the overrides for the
10500 scene modes it actually supports */
10501 size_t j = 0;
10502 if (size > max_size) {
10503 size = max_size;
10504 }
10505 size_t focus_count = CAM_FOCUS_MODE_MAX;
10506 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10507 focus_count);
10508 for (size_t i = 0; i < size; i++) {
10509 bool supt = false;
10510 size_t index = supported_indexes[i];
10511 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10512 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10513 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10514 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10515 overridesTable[index].awb_mode);
10516 if (NAME_NOT_FOUND != val) {
10517 overridesList[j+1] = (uint8_t)val;
10518 }
10519 uint8_t focus_override = overridesTable[index].af_mode;
10520 for (size_t k = 0; k < focus_count; k++) {
10521 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10522 supt = true;
10523 break;
10524 }
10525 }
10526 if (supt) {
10527 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10528 focus_override);
10529 if (NAME_NOT_FOUND != val) {
10530 overridesList[j+2] = (uint8_t)val;
10531 }
10532 } else {
10533 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10534 }
10535 j+=3;
10536 }
10537}
10538
10539/*===========================================================================
10540 * FUNCTION : filterJpegSizes
10541 *
10542 * DESCRIPTION: Returns the supported JPEG sizes, keeping only processed sizes
10543 * no smaller than the active array scaled down by the downscale factor
10544 *
10545 * PARAMETERS : @jpegSizes : output array; @processedSizes / @processedSizesCnt : candidate sizes
10546 *              @maxCount : max entries to consider; @active_array_size / @downscale_factor : set the minimum kept size
10547 * RETURN : length of jpegSizes array
10548 *==========================================================================*/
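// Example (illustrative): with a 4000x3000 active array and downscale_factor 2,
// only processed sizes of at least 2000x1500 are kept as JPEG sizes.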
10549
10550size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10551 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10552 uint8_t downscale_factor)
10553{
10554 if (0 == downscale_factor) {
10555 downscale_factor = 1;
10556 }
10557
10558 int32_t min_width = active_array_size.width / downscale_factor;
10559 int32_t min_height = active_array_size.height / downscale_factor;
10560 size_t jpegSizesCnt = 0;
10561 if (processedSizesCnt > maxCount) {
10562 processedSizesCnt = maxCount;
10563 }
10564 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10565 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10566 jpegSizes[jpegSizesCnt] = processedSizes[i];
10567 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10568 jpegSizesCnt += 2;
10569 }
10570 }
10571 return jpegSizesCnt;
10572}
10573
10574/*===========================================================================
10575 * FUNCTION : computeNoiseModelEntryS
10576 *
10577 * DESCRIPTION: function to map a given sensitivity to the S noise
10578 * model parameters in the DNG noise model.
10579 *
10580 * PARAMETERS : sens : the sensor sensitivity
10581 *
10582 * RETURN : S (sensor amplification) noise
10583 *
10584 *==========================================================================*/
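// Note: the S and O values computed below feed the DNG-style noise profile
// reported to the framework, which models per-pixel noise as N(x) = sqrt(S*x + O)
// with x the normalized pixel value (as used by ANDROID_SENSOR_NOISE_PROFILE).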
10585double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10586 double s = gCamCapability[mCameraId]->gradient_S * sens +
10587 gCamCapability[mCameraId]->offset_S;
10588 return ((s < 0.0) ? 0.0 : s);
10589}
10590
10591/*===========================================================================
10592 * FUNCTION : computeNoiseModelEntryO
10593 *
10594 * DESCRIPTION: function to map a given sensitivity to the O noise
10595 * model parameters in the DNG noise model.
10596 *
10597 * PARAMETERS : sens : the sensor sensitivity
10598 *
10599 * RETURN : O (sensor readout) noise
10600 *
10601 *==========================================================================*/
10602double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10603 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10604 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10605 1.0 : (1.0 * sens / max_analog_sens);
10606 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10607 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10608 return ((o < 0.0) ? 0.0 : o);
10609}
10610
10611/*===========================================================================
10612 * FUNCTION : getSensorSensitivity
10613 *
10614 * DESCRIPTION: convert iso_mode to an integer value
10615 *
10616 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10617 *
10618 * RETURN : sensitivity supported by sensor
10619 *
10620 *==========================================================================*/
10621int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10622{
10623 int32_t sensitivity;
10624
10625 switch (iso_mode) {
10626 case CAM_ISO_MODE_100:
10627 sensitivity = 100;
10628 break;
10629 case CAM_ISO_MODE_200:
10630 sensitivity = 200;
10631 break;
10632 case CAM_ISO_MODE_400:
10633 sensitivity = 400;
10634 break;
10635 case CAM_ISO_MODE_800:
10636 sensitivity = 800;
10637 break;
10638 case CAM_ISO_MODE_1600:
10639 sensitivity = 1600;
10640 break;
10641 default:
10642 sensitivity = -1;
10643 break;
10644 }
10645 return sensitivity;
10646}
10647
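/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: Open the Easel manager client if Easel is present, power Easel
 *              on and suspend it immediately, and read the HDR+ related
 *              properties. Expects gHdrPlusClientLock to be held.
 *
 *              Properties consulted below:
 *                camera.hdrplus.donotpoweroneasel - leave Easel powered off
 *                persist.camera.hdrplus.enable    - enable full HDR+ (not bypass)
 *                persist.camera.hdrplus.profiling - enable HDR+ profiling
 *              e.g. "adb shell setprop persist.camera.hdrplus.enable 1"
 *              (illustrative bring-up command, not part of this HAL).
 *
 * RETURN     : OK on success; error code otherwise
 *==========================================================================*/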
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010648int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010649 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010650 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10651 // to connect to Easel.
10652 bool doNotpowerOnEasel =
10653 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10654
10655 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010656 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10657 return OK;
10658 }
10659
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010660 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010661 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010662 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010663 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010664 return res;
10665 }
10666
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010667 EaselManagerClientOpened = true;
10668
10669 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010670 if (res != OK) {
10671 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10672 }
10673
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010674 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010675 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010676
10677 // Expose enableZsl key only when HDR+ mode is enabled.
10678 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010679 }
10680
10681 return OK;
10682}
10683
Thierry Strudel3d639192016-09-09 11:52:26 -070010684/*===========================================================================
10685 * FUNCTION : getCamInfo
10686 *
10687 * DESCRIPTION: query camera capabilities
10688 *
10689 * PARAMETERS :
10690 * @cameraId : camera Id
10691 * @info : camera info struct to be filled in with camera capabilities
10692 *
10693 * RETURN : int type of status
10694 * NO_ERROR -- success
10695 * non-zero failure code
10696 *==========================================================================*/
10697int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10698 struct camera_info *info)
10699{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010700 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010701 int rc = 0;
10702
10703 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010704
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010705 {
10706 Mutex::Autolock l(gHdrPlusClientLock);
10707 rc = initHdrPlusClientLocked();
10708 if (rc != OK) {
10709 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10710 pthread_mutex_unlock(&gCamLock);
10711 return rc;
10712 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010713 }
10714
Thierry Strudel3d639192016-09-09 11:52:26 -070010715 if (NULL == gCamCapability[cameraId]) {
10716 rc = initCapabilities(cameraId);
10717 if (rc < 0) {
10718 pthread_mutex_unlock(&gCamLock);
10719 return rc;
10720 }
10721 }
10722
10723 if (NULL == gStaticMetadata[cameraId]) {
10724 rc = initStaticMetadata(cameraId);
10725 if (rc < 0) {
10726 pthread_mutex_unlock(&gCamLock);
10727 return rc;
10728 }
10729 }
10730
10731 switch(gCamCapability[cameraId]->position) {
10732 case CAM_POSITION_BACK:
10733 case CAM_POSITION_BACK_AUX:
10734 info->facing = CAMERA_FACING_BACK;
10735 break;
10736
10737 case CAM_POSITION_FRONT:
10738 case CAM_POSITION_FRONT_AUX:
10739 info->facing = CAMERA_FACING_FRONT;
10740 break;
10741
10742 default:
10743 LOGE("Unknown position type %d for camera id:%d",
10744 gCamCapability[cameraId]->position, cameraId);
10745 rc = -1;
10746 break;
10747 }
10748
10749
10750 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010751#ifndef USE_HAL_3_3
10752 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10753#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010754 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010755#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010756 info->static_camera_characteristics = gStaticMetadata[cameraId];
10757
10758 //For now assume both cameras can operate independently.
10759 info->conflicting_devices = NULL;
10760 info->conflicting_devices_length = 0;
10761
10762 //resource cost is 100 * MIN(1.0, m/M),
10763 //where m is throughput requirement with maximum stream configuration
10764 //and M is CPP maximum throughput.
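// Illustrative example (assumed numbers): with 3 processed streams on a
// 4000x3000 active array at a 30 fps maximum, m = 3 * 4000 * 3000 * 30
// = 1.08e9 pixels/s; with a CPP bandwidth M of 1.2e9 pixels/s the ratio is
// 0.9, so resource_cost = 100 * MIN(1.0, 0.9) = 90.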
10765 float max_fps = 0.0;
10766 for (uint32_t i = 0;
10767 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10768 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10769 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10770 }
10771 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10772 gCamCapability[cameraId]->active_array_size.width *
10773 gCamCapability[cameraId]->active_array_size.height * max_fps /
10774 gCamCapability[cameraId]->max_pixel_bandwidth;
10775 info->resource_cost = 100 * MIN(1.0, ratio);
10776 LOGI("camera %d resource cost is %d", cameraId,
10777 info->resource_cost);
10778
10779 pthread_mutex_unlock(&gCamLock);
10780 return rc;
10781}
10782
10783/*===========================================================================
10784 * FUNCTION : translateCapabilityToMetadata
10785 *
10786 * DESCRIPTION: translate the capability into camera_metadata_t
10787 *
10788 * PARAMETERS : type of the request
10789 *
10790 *
10791 * RETURN : success: camera_metadata_t*
10792 * failure: NULL
10793 *
10794 *==========================================================================*/
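// Note: default request templates are built lazily, once per template type, and
// cached in mDefaultMetadata[]; later calls return the cached metadata.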
10795camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10796{
10797 if (mDefaultMetadata[type] != NULL) {
10798 return mDefaultMetadata[type];
10799 }
10800 //first time we are handling this request
10801 //fill up the metadata structure using the wrapper class
10802 CameraMetadata settings;
10803 //translate from cam_capability_t to camera_metadata_tag_t
10804 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10805 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10806 int32_t defaultRequestID = 0;
10807 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10808
10809 /* OIS disable */
10810 char ois_prop[PROPERTY_VALUE_MAX];
10811 memset(ois_prop, 0, sizeof(ois_prop));
10812 property_get("persist.camera.ois.disable", ois_prop, "0");
10813 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10814
10815 /* Force video to use OIS */
10816 char videoOisProp[PROPERTY_VALUE_MAX];
10817 memset(videoOisProp, 0, sizeof(videoOisProp));
10818 property_get("persist.camera.ois.video", videoOisProp, "1");
10819 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010820
10821 // Hybrid AE enable/disable
10822 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10823 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10824 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10825 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10826
Thierry Strudel3d639192016-09-09 11:52:26 -070010827 uint8_t controlIntent = 0;
10828 uint8_t focusMode;
10829 uint8_t vsMode;
10830 uint8_t optStabMode;
10831 uint8_t cacMode;
10832 uint8_t edge_mode;
10833 uint8_t noise_red_mode;
10834 uint8_t tonemap_mode;
10835 bool highQualityModeEntryAvailable = FALSE;
10836 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010837 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010838 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10839 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010840 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010841 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010842 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010843
Thierry Strudel3d639192016-09-09 11:52:26 -070010844 switch (type) {
10845 case CAMERA3_TEMPLATE_PREVIEW:
10846 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10847 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10848 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10849 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10850 edge_mode = ANDROID_EDGE_MODE_FAST;
10851 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10852 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10853 break;
10854 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10855 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10856 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10857 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10858 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10859 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10860 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10861 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10862 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10863 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10864 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10865 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10866 highQualityModeEntryAvailable = TRUE;
10867 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10868 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10869 fastModeEntryAvailable = TRUE;
10870 }
10871 }
10872 if (highQualityModeEntryAvailable) {
10873 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10874 } else if (fastModeEntryAvailable) {
10875 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10876 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010877 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10878 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10879 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010880 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010881 break;
10882 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10883 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10884 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10885 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010886 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10887 edge_mode = ANDROID_EDGE_MODE_FAST;
10888 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10889 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10890 if (forceVideoOis)
10891 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10892 break;
10893 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10894 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10895 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10896 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010897 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10898 edge_mode = ANDROID_EDGE_MODE_FAST;
10899 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10900 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10901 if (forceVideoOis)
10902 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10903 break;
10904 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10905 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10906 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10907 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10908 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10909 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10910 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10911 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10912 break;
10913 case CAMERA3_TEMPLATE_MANUAL:
10914 edge_mode = ANDROID_EDGE_MODE_FAST;
10915 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10916 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10917 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10918 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10919 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10920 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10921 break;
10922 default:
10923 edge_mode = ANDROID_EDGE_MODE_FAST;
10924 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10925 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10926 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10927 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10928 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10929 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10930 break;
10931 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010932 // Set CAC to OFF if the underlying device doesn't support it
10933 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10934 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10935 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010936 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10937 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10938 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10939 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10940 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10941 }
10942 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010943 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010944 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010945
10946 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10947 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10948 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10949 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10950 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10951 || ois_disable)
10952 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10953 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010954 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010955
10956 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10957 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10958
10959 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10960 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10961
10962 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10963 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10964
10965 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10966 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10967
10968 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10969 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10970
10971 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10972 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10973
10974 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10975 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10976
10977 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10978 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10979
10980 /*flash*/
10981 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10982 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10983
10984 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10985 settings.update(ANDROID_FLASH_FIRING_POWER,
10986 &flashFiringLevel, 1);
10987
10988 /* lens */
10989 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10990 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10991
10992 if (gCamCapability[mCameraId]->filter_densities_count) {
10993 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10994 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10995 gCamCapability[mCameraId]->filter_densities_count);
10996 }
10997
10998 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10999 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11000
Thierry Strudel3d639192016-09-09 11:52:26 -070011001 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11002 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11003
11004 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11005 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11006
11007 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11008 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11009
11010 /* face detection (default to OFF) */
11011 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11012 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11013
Thierry Strudel54dc9782017-02-15 12:12:10 -080011014 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11015 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011016
11017 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11018 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11019
11020 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11021 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11022
Thierry Strudel3d639192016-09-09 11:52:26 -070011023
11024 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11025 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11026
11027 /* Exposure time(Update the Min Exposure Time)*/
11028 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11029 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11030
11031 /* frame duration */
11032 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11033 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11034
11035 /* sensitivity */
11036 static const int32_t default_sensitivity = 100;
11037 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011038#ifndef USE_HAL_3_3
11039 static const int32_t default_isp_sensitivity =
11040 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11041 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11042#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011043
11044 /*edge mode*/
11045 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11046
11047 /*noise reduction mode*/
11048 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11049
11050 /*color correction mode*/
11051 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11052 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11053
11054 /*tonemap mode*/
11055 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11056
11057 int32_t scaler_crop_region[4];
11058 scaler_crop_region[0] = 0;
11059 scaler_crop_region[1] = 0;
11060 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11061 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11062 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11063
11064 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11065 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11066
11067 /*focus distance*/
11068 float focus_distance = 0.0;
11069 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11070
11071 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011072 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011073 float max_range = 0.0;
11074 float max_fixed_fps = 0.0;
11075 int32_t fps_range[2] = {0, 0};
11076 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11077 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011078 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11079 TEMPLATE_MAX_PREVIEW_FPS) {
11080 continue;
11081 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011082 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11083 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11084 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11085 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11086 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11087 if (range > max_range) {
11088 fps_range[0] =
11089 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11090 fps_range[1] =
11091 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11092 max_range = range;
11093 }
11094 } else {
11095 if (range < 0.01 && max_fixed_fps <
11096 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11097 fps_range[0] =
11098 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11099 fps_range[1] =
11100 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11101 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11102 }
11103 }
11104 }
11105 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11106
11107 /*precapture trigger*/
11108 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11109 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11110
11111 /*af trigger*/
11112 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11113 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11114
11115 /* ae & af regions */
11116 int32_t active_region[] = {
11117 gCamCapability[mCameraId]->active_array_size.left,
11118 gCamCapability[mCameraId]->active_array_size.top,
11119 gCamCapability[mCameraId]->active_array_size.left +
11120 gCamCapability[mCameraId]->active_array_size.width,
11121 gCamCapability[mCameraId]->active_array_size.top +
11122 gCamCapability[mCameraId]->active_array_size.height,
11123 0};
11124 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11125 sizeof(active_region) / sizeof(active_region[0]));
11126 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11127 sizeof(active_region) / sizeof(active_region[0]));
11128
11129 /* black level lock */
11130 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11131 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11132
Thierry Strudel3d639192016-09-09 11:52:26 -070011133 //special defaults for manual template
11134 if (type == CAMERA3_TEMPLATE_MANUAL) {
11135 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11136 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11137
11138 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11139 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11140
11141 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11142 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11143
11144 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11145 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11146
11147 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11148 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11149
11150 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11151 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11152 }
11153
11154
11155 /* TNR
11156 * This is where we decide for which templates TNR will be enabled.
11157 * TNR is turned on if either the preview or the video stream requires it.
11158 * This is not to be confused with per-stream linking; that decision is
11159 * still made per session and is handled as part of stream configuration.
11160 */
11161 uint8_t tnr_enable = 0;
11162
11163 if (m_bTnrPreview || m_bTnrVideo) {
11164
11165 switch (type) {
11166 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11167 tnr_enable = 1;
11168 break;
11169
11170 default:
11171 tnr_enable = 0;
11172 break;
11173 }
11174
11175 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11176 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11177 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11178
11179 LOGD("TNR:%d with process plate %d for template:%d",
11180 tnr_enable, tnr_process_type, type);
11181 }
11182
11183 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011184 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011185 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11186
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011187 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011188 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11189
Shuzhen Wang920ea402017-05-03 08:49:39 -070011190 uint8_t related_camera_id = mCameraId;
11191 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011192
11193 /* CDS default */
11194 char prop[PROPERTY_VALUE_MAX];
11195 memset(prop, 0, sizeof(prop));
11196 property_get("persist.camera.CDS", prop, "Auto");
11197 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11198 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11199 if (CAM_CDS_MODE_MAX == cds_mode) {
11200 cds_mode = CAM_CDS_MODE_AUTO;
11201 }
11202
11203 /* Disabling CDS in templates which have TNR enabled*/
11204 if (tnr_enable)
11205 cds_mode = CAM_CDS_MODE_OFF;
11206
11207 int32_t mode = cds_mode;
11208 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011209
Thierry Strudel269c81a2016-10-12 12:13:59 -070011210 /* Manual Convergence AEC Speed is disabled by default*/
11211 float default_aec_speed = 0;
11212 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11213
11214 /* Manual Convergence AWB Speed is disabled by default*/
11215 float default_awb_speed = 0;
11216 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11217
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011218 // Set instant AEC to normal convergence by default
11219 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11220 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11221
Shuzhen Wang19463d72016-03-08 11:09:52 -080011222 /* hybrid ae */
11223 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11224
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011225 if (gExposeEnableZslKey) {
11226 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11227 }
11228
Thierry Strudel3d639192016-09-09 11:52:26 -070011229 mDefaultMetadata[type] = settings.release();
11230
11231 return mDefaultMetadata[type];
11232}
11233
11234/*===========================================================================
11235 * FUNCTION : setFrameParameters
11236 *
11237 * DESCRIPTION: set parameters per frame as requested in the metadata from
11238 * framework
11239 *
11240 * PARAMETERS :
11241 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011242 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011243 * @blob_request: Whether this request is a blob request or not
11244 *
11245 * RETURN : success: NO_ERROR
11246 * failure:
11247 *==========================================================================*/
11248int QCamera3HardwareInterface::setFrameParameters(
11249 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011250 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011251 int blob_request,
11252 uint32_t snapshotStreamId)
11253{
11254 /*translate from camera_metadata_t type to parm_type_t*/
11255 int rc = 0;
11256 int32_t hal_version = CAM_HAL_V3;
11257
11258 clear_metadata_buffer(mParameters);
11259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11260 LOGE("Failed to set hal version in the parameters");
11261 return BAD_VALUE;
11262 }
11263
11264 /*we need to update the frame number in the parameters*/
11265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11266 request->frame_number)) {
11267 LOGE("Failed to set the frame number in the parameters");
11268 return BAD_VALUE;
11269 }
11270
11271 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011273 LOGE("Failed to set stream type mask in the parameters");
11274 return BAD_VALUE;
11275 }
11276
11277 if (mUpdateDebugLevel) {
11278 uint32_t dummyDebugLevel = 0;
11279 /* The value of dummyDebugLevel is irrelevant. On
11280 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11282 dummyDebugLevel)) {
11283 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11284 return BAD_VALUE;
11285 }
11286 mUpdateDebugLevel = false;
11287 }
11288
11289 if(request->settings != NULL){
11290 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11291 if (blob_request)
11292 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11293 }
11294
11295 return rc;
11296}
11297
11298/*===========================================================================
11299 * FUNCTION : setReprocParameters
11300 *
11301 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11302 * return it.
11303 *
11304 * PARAMETERS :
11305 * @request : request that needs to be serviced
11306 *
11307 * RETURN : success: NO_ERROR
11308 * failure:
11309 *==========================================================================*/
11310int32_t QCamera3HardwareInterface::setReprocParameters(
11311 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11312 uint32_t snapshotStreamId)
11313{
11314 /*translate from camera_metadata_t type to parm_type_t*/
11315 int rc = 0;
11316
11317 if (NULL == request->settings){
11318 LOGE("Reprocess settings cannot be NULL");
11319 return BAD_VALUE;
11320 }
11321
11322 if (NULL == reprocParam) {
11323 LOGE("Invalid reprocessing metadata buffer");
11324 return BAD_VALUE;
11325 }
11326 clear_metadata_buffer(reprocParam);
11327
11328 /*we need to update the frame number in the parameters*/
11329 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11330 request->frame_number)) {
11331 LOGE("Failed to set the frame number in the parameters");
11332 return BAD_VALUE;
11333 }
11334
11335 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11336 if (rc < 0) {
11337 LOGE("Failed to translate reproc request");
11338 return rc;
11339 }
11340
11341 CameraMetadata frame_settings;
11342 frame_settings = request->settings;
11343 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11344 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11345 int32_t *crop_count =
11346 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11347 int32_t *crop_data =
11348 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11349 int32_t *roi_map =
11350 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11351 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11352 cam_crop_data_t crop_meta;
11353 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11354 crop_meta.num_of_streams = 1;
11355 crop_meta.crop_info[0].crop.left = crop_data[0];
11356 crop_meta.crop_info[0].crop.top = crop_data[1];
11357 crop_meta.crop_info[0].crop.width = crop_data[2];
11358 crop_meta.crop_info[0].crop.height = crop_data[3];
11359
11360 crop_meta.crop_info[0].roi_map.left =
11361 roi_map[0];
11362 crop_meta.crop_info[0].roi_map.top =
11363 roi_map[1];
11364 crop_meta.crop_info[0].roi_map.width =
11365 roi_map[2];
11366 crop_meta.crop_info[0].roi_map.height =
11367 roi_map[3];
11368
11369 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11370 rc = BAD_VALUE;
11371 }
11372 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11373 request->input_buffer->stream,
11374 crop_meta.crop_info[0].crop.left,
11375 crop_meta.crop_info[0].crop.top,
11376 crop_meta.crop_info[0].crop.width,
11377 crop_meta.crop_info[0].crop.height);
11378 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11379 request->input_buffer->stream,
11380 crop_meta.crop_info[0].roi_map.left,
11381 crop_meta.crop_info[0].roi_map.top,
11382 crop_meta.crop_info[0].roi_map.width,
11383 crop_meta.crop_info[0].roi_map.height);
11384 } else {
11385 LOGE("Invalid reprocess crop count %d!", *crop_count);
11386 }
11387 } else {
11388 LOGE("No crop data from matching output stream");
11389 }
11390
11391 /* These settings are not needed for regular requests so handle them specially for
11392 reprocess requests; information needed for EXIF tags */
11393 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11394 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11395 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11396 if (NAME_NOT_FOUND != val) {
11397 uint32_t flashMode = (uint32_t)val;
11398 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11399 rc = BAD_VALUE;
11400 }
11401 } else {
11402 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11403 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11404 }
11405 } else {
11406 LOGH("No flash mode in reprocess settings");
11407 }
11408
11409 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11410 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11412 rc = BAD_VALUE;
11413 }
11414 } else {
11415 LOGH("No flash state in reprocess settings");
11416 }
11417
11418 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11419 uint8_t *reprocessFlags =
11420 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11421 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11422 *reprocessFlags)) {
11423 rc = BAD_VALUE;
11424 }
11425 }
11426
Thierry Strudel54dc9782017-02-15 12:12:10 -080011427 // Add exif debug data to internal metadata
11428 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11429 mm_jpeg_debug_exif_params_t *debug_params =
11430 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11431 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11432 // AE
11433 if (debug_params->ae_debug_params_valid == TRUE) {
11434 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11435 debug_params->ae_debug_params);
11436 }
11437 // AWB
11438 if (debug_params->awb_debug_params_valid == TRUE) {
11439 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11440 debug_params->awb_debug_params);
11441 }
11442 // AF
11443 if (debug_params->af_debug_params_valid == TRUE) {
11444 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11445 debug_params->af_debug_params);
11446 }
11447 // ASD
11448 if (debug_params->asd_debug_params_valid == TRUE) {
11449 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11450 debug_params->asd_debug_params);
11451 }
11452 // Stats
11453 if (debug_params->stats_debug_params_valid == TRUE) {
11454 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11455 debug_params->stats_debug_params);
11456 }
11457 // BE Stats
11458 if (debug_params->bestats_debug_params_valid == TRUE) {
11459 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11460 debug_params->bestats_debug_params);
11461 }
11462 // BHIST
11463 if (debug_params->bhist_debug_params_valid == TRUE) {
11464 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11465 debug_params->bhist_debug_params);
11466 }
11467 // 3A Tuning
11468 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11469 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11470 debug_params->q3a_tuning_debug_params);
11471 }
11472 }
11473
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011474 // Add metadata which reprocess needs
11475 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11476 cam_reprocess_info_t *repro_info =
11477 (cam_reprocess_info_t *)frame_settings.find
11478 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011479 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011480 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011481 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011482 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011483 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011484 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011485 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011486 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011487 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011488 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011489 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011490 repro_info->pipeline_flip);
11491 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11492 repro_info->af_roi);
11493 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11494 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011495 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11496 CAM_INTF_PARM_ROTATION metadata then has been added in
11497 translateToHalMetadata. HAL need to keep this new rotation
11498 metadata. Otherwise, the old rotation info saved in the vendor tag
11499 would be used */
11500 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11501 CAM_INTF_PARM_ROTATION, reprocParam) {
11502 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11503 } else {
11504 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011505 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011506 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011507 }
11508
11509 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11510 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11511 roi.width and roi.height become the final JPEG size.
11512 For now, HAL only checks this for reprocess requests */
11513 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11514 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11515 uint8_t *enable =
11516 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11517 if (*enable == TRUE) {
11518 int32_t *crop_data =
11519 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11520 cam_stream_crop_info_t crop_meta;
11521 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11522 crop_meta.stream_id = 0;
11523 crop_meta.crop.left = crop_data[0];
11524 crop_meta.crop.top = crop_data[1];
11525 crop_meta.crop.width = crop_data[2];
11526 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011527 // The JPEG crop roi should match cpp output size
11528 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11529 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11530 crop_meta.roi_map.left = 0;
11531 crop_meta.roi_map.top = 0;
11532 crop_meta.roi_map.width = cpp_crop->crop.width;
11533 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011534 }
11535 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11536 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011537 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011538 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011539 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11540 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011541 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011542 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11543
11544 // Add JPEG scale information
11545 cam_dimension_t scale_dim;
11546 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11547 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11548 int32_t *roi =
11549 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11550 scale_dim.width = roi[2];
11551 scale_dim.height = roi[3];
11552 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11553 scale_dim);
11554 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11555 scale_dim.width, scale_dim.height, mCameraId);
11556 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011557 }
11558 }
11559
11560 return rc;
11561}
11562
11563/*===========================================================================
11564 * FUNCTION : saveRequestSettings
11565 *
11566 * DESCRIPTION: Add any settings that might have changed to the request settings
11567 * and save the settings to be applied on the frame
11568 *
11569 * PARAMETERS :
11570 * @jpegMetadata : the extracted and/or modified jpeg metadata
11571 * @request : request with initial settings
11572 *
11573 * RETURN :
11574 * camera_metadata_t* : pointer to the saved request settings
11575 *==========================================================================*/
11576camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11577 const CameraMetadata &jpegMetadata,
11578 camera3_capture_request_t *request)
11579{
11580 camera_metadata_t *resultMetadata;
11581 CameraMetadata camMetadata;
11582 camMetadata = request->settings;
11583
11584 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11585 int32_t thumbnail_size[2];
11586 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11587 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11588 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11589 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11590 }
11591
11592 if (request->input_buffer != NULL) {
11593 uint8_t reprocessFlags = 1;
11594 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11595 (uint8_t*)&reprocessFlags,
11596 sizeof(reprocessFlags));
11597 }
11598
11599 resultMetadata = camMetadata.release();
11600 return resultMetadata;
11601}
11602
11603/*===========================================================================
11604 * FUNCTION : setHalFpsRange
11605 *
11606 * DESCRIPTION: set FPS range parameter
11607 *
11608 *
11609 * PARAMETERS :
11610 * @settings : Metadata from framework
11611 * @hal_metadata: Metadata buffer
11612 *
11613 *
11614 * RETURN : success: NO_ERROR
11615 * failure:
11616 *==========================================================================*/
11617int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11618 metadata_buffer_t *hal_metadata)
11619{
11620 int32_t rc = NO_ERROR;
11621 cam_fps_range_t fps_range;
11622 fps_range.min_fps = (float)
11623 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11624 fps_range.max_fps = (float)
11625 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11626 fps_range.video_min_fps = fps_range.min_fps;
11627 fps_range.video_max_fps = fps_range.max_fps;
11628
11629 LOGD("aeTargetFpsRange fps: [%f %f]",
11630 fps_range.min_fps, fps_range.max_fps);
11631 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11632 * follows:
11633 * ---------------------------------------------------------------|
11634 * Video stream is absent in configure_streams |
11635 * (Camcorder preview before the first video record |
11636 * ---------------------------------------------------------------|
11637 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11638 * | | | vid_min/max_fps|
11639 * ---------------------------------------------------------------|
11640 * NO | [ 30, 240] | 240 | [240, 240] |
11641 * |-------------|-------------|----------------|
11642 * | [240, 240] | 240 | [240, 240] |
11643 * ---------------------------------------------------------------|
11644 * Video stream is present in configure_streams |
11645 * ---------------------------------------------------------------|
11646 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11647 * | | | vid_min/max_fps|
11648 * ---------------------------------------------------------------|
11649 * NO | [ 30, 240] | 240 | [240, 240] |
11650 * (camcorder prev |-------------|-------------|----------------|
11651 * after video rec | [240, 240] | 240 | [240, 240] |
11652 * is stopped) | | | |
11653 * ---------------------------------------------------------------|
11654 * YES | [ 30, 240] | 240 | [240, 240] |
11655 * |-------------|-------------|----------------|
11656 * | [240, 240] | 240 | [240, 240] |
11657 * ---------------------------------------------------------------|
11658 * When Video stream is absent in configure_streams,
11659 * preview fps = sensor_fps / batchsize
11660 * Eg: for 240fps at batchSize 4, preview = 60fps
11661 * for 120fps at batchSize 4, preview = 30fps
11662 *
11663     * When a video stream is present in configure_streams, preview fps follows
11664     * the ratio of preview buffers to video buffers requested in
11665     * process_capture_request
11666 */
11667 mBatchSize = 0;
11668 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11669 fps_range.min_fps = fps_range.video_max_fps;
11670 fps_range.video_min_fps = fps_range.video_max_fps;
11671 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11672 fps_range.max_fps);
11673 if (NAME_NOT_FOUND != val) {
11674 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11675 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11676 return BAD_VALUE;
11677 }
11678
11679 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11680 /* If batchmode is currently in progress and the fps changes,
11681 * set the flag to restart the sensor */
11682 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11683 (mHFRVideoFps != fps_range.max_fps)) {
11684 mNeedSensorRestart = true;
11685 }
11686 mHFRVideoFps = fps_range.max_fps;
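                    /* Batch size is the ratio of the HFR video rate to the HFR
                     * preview rate (PREVIEW_FPS_FOR_HFR), capped below at
                     * MAX_HFR_BATCH_SIZE. */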
11687 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11688 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11689 mBatchSize = MAX_HFR_BATCH_SIZE;
11690 }
11691 }
11692 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11693
11694 }
11695 } else {
11696        /* HFR mode is a session parameter in the backend/ISP; it must be
11697         * reset explicitly when not in HFR mode */
11698 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11700 return BAD_VALUE;
11701 }
11702 }
11703 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11704 return BAD_VALUE;
11705 }
11706 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11707 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11708 return rc;
11709}
11710
11711/*===========================================================================
11712 * FUNCTION : translateToHalMetadata
11713 *
11714 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11715 *
11716 *
11717 * PARAMETERS :
11718 *   @request          : request sent from framework
11719 *   @hal_metadata     : destination HAL metadata buffer
11720 *   @snapshotStreamId : stream id of the snapshot stream in this request
11721 * RETURN     : success: NO_ERROR
11722 *              failure: BAD_VALUE
11723 *==========================================================================*/
11724int QCamera3HardwareInterface::translateToHalMetadata
11725 (const camera3_capture_request_t *request,
11726 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011727 uint32_t snapshotStreamId) {
11728 if (request == nullptr || hal_metadata == nullptr) {
11729 return BAD_VALUE;
11730 }
11731
11732 int64_t minFrameDuration = getMinFrameDuration(request);
11733
11734 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11735 minFrameDuration);
11736}
11737
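/*===========================================================================
 * FUNCTION   : translateFwkMetadataToHalMetadata
 *
 * DESCRIPTION: translate framework metadata entries from a capture request
 *              into HAL metadata (parm_type_t) entries
 *
 * PARAMETERS :
 *   @frameworkMetadata : settings from the framework capture request
 *   @hal_metadata      : destination HAL metadata buffer
 *   @snapshotStreamId  : stream id of the snapshot stream in this request
 *   @minFrameDuration  : minimum frame duration, used to clamp
 *                        ANDROID_SENSOR_FRAME_DURATION
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
 *==========================================================================*/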
11738int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11739 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11740 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11741
Thierry Strudel3d639192016-09-09 11:52:26 -070011742 int rc = 0;
11743 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011744 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011745
11746 /* Do not change the order of the following list unless you know what you are
11747 * doing.
11748 * The order is laid out in such a way that parameters in the front of the table
11749 * may be used to override the parameters later in the table. Examples are:
11750 * 1. META_MODE should precede AEC/AWB/AF MODE
11751     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11752     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11753     * 4. Any mode should precede its corresponding settings
11754 */
11755 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11756 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11758 rc = BAD_VALUE;
11759 }
11760 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11761 if (rc != NO_ERROR) {
11762 LOGE("extractSceneMode failed");
11763 }
11764 }
11765
11766 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11767 uint8_t fwk_aeMode =
11768 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11769 uint8_t aeMode;
11770 int32_t redeye;
11771
11772 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11773 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011774 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11775 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011776 } else {
11777 aeMode = CAM_AE_MODE_ON;
11778 }
11779 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11780 redeye = 1;
11781 } else {
11782 redeye = 0;
11783 }
11784
11785 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11786 fwk_aeMode);
11787 if (NAME_NOT_FOUND != val) {
11788 int32_t flashMode = (int32_t)val;
11789 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11790 }
11791
11792 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11794 rc = BAD_VALUE;
11795 }
11796 }
11797
11798 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11799 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11800 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11801 fwk_whiteLevel);
11802 if (NAME_NOT_FOUND != val) {
11803 uint8_t whiteLevel = (uint8_t)val;
11804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11805 rc = BAD_VALUE;
11806 }
11807 }
11808 }
11809
11810 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11811 uint8_t fwk_cacMode =
11812 frame_settings.find(
11813 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11814 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11815 fwk_cacMode);
11816 if (NAME_NOT_FOUND != val) {
11817 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11818 bool entryAvailable = FALSE;
11819 // Check whether Frameworks set CAC mode is supported in device or not
11820 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11821 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11822 entryAvailable = TRUE;
11823 break;
11824 }
11825 }
11826 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11827            // If the requested mode is not supported, substitute a device-supported mode:
11828            // HW ISP CAC only (no SW CAC): all 3 modes advertised, with HIGH doing the same as FAST (in ISP)
11829            // No HW ISP CAC (SW CAC only): all 3 modes advertised, with FAST doing the same as OFF
11830 if (entryAvailable == FALSE) {
11831 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11832 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11833 } else {
11834 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11835                        // HIGH is not supported, so fall back to FAST; the spec says the
11836                        // underlying implementation may be the same for both modes.
11837 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11838 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11839                        // FAST is not supported either; choose OFF rather than HIGH
11840                        // to avoid the fps drop that high-quality processing would cause.
11841 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11842 } else {
11843 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11844 }
11845 }
11846 }
11847 LOGD("Final cacMode is %d", cacMode);
11848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11849 rc = BAD_VALUE;
11850 }
11851 } else {
11852 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11853 }
11854 }
11855
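    // Debug override: a non-zero persist.camera.af.infinity property ignores
    // the framework AF mode and forces the focus mode to infinity.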
Thierry Strudel2896d122017-02-23 19:18:03 -080011856 char af_value[PROPERTY_VALUE_MAX];
11857 property_get("persist.camera.af.infinity", af_value, "0");
11858
Jason Lee84ae9972017-02-24 13:24:24 -080011859 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011860 if (atoi(af_value) == 0) {
11861 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011862 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011863 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11864 fwk_focusMode);
11865 if (NAME_NOT_FOUND != val) {
11866 uint8_t focusMode = (uint8_t)val;
11867 LOGD("set focus mode %d", focusMode);
11868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11869 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11870 rc = BAD_VALUE;
11871 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011872 }
11873 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011874 } else {
11875 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11876 LOGE("Focus forced to infinity %d", focusMode);
11877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11878 rc = BAD_VALUE;
11879 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011880 }
11881
Jason Lee84ae9972017-02-24 13:24:24 -080011882 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11883 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011884 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11885 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11886 focalDistance)) {
11887 rc = BAD_VALUE;
11888 }
11889 }
11890
11891 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11892 uint8_t fwk_antibandingMode =
11893 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11894 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11895 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11896 if (NAME_NOT_FOUND != val) {
11897 uint32_t hal_antibandingMode = (uint32_t)val;
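            // CAM_ANTIBANDING_MODE_AUTO is specialized into the 60Hz or 50Hz
            // auto variant, based on m60HzZone, before being sent to the backend.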
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011898 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11899 if (m60HzZone) {
11900 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11901 } else {
11902 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11903 }
11904 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011905 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11906 hal_antibandingMode)) {
11907 rc = BAD_VALUE;
11908 }
11909 }
11910 }
11911
11912 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11913 int32_t expCompensation = frame_settings.find(
11914 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11915 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11916 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11917 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11918 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011919 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011920 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11921 expCompensation)) {
11922 rc = BAD_VALUE;
11923 }
11924 }
11925
11926 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11927 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11929 rc = BAD_VALUE;
11930 }
11931 }
11932 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11933 rc = setHalFpsRange(frame_settings, hal_metadata);
11934 if (rc != NO_ERROR) {
11935 LOGE("setHalFpsRange failed");
11936 }
11937 }
11938
11939 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11940 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11941 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11942 rc = BAD_VALUE;
11943 }
11944 }
11945
11946 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11947 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11948 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11949 fwk_effectMode);
11950 if (NAME_NOT_FOUND != val) {
11951 uint8_t effectMode = (uint8_t)val;
11952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11953 rc = BAD_VALUE;
11954 }
11955 }
11956 }
11957
11958 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11959 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11961 colorCorrectMode)) {
11962 rc = BAD_VALUE;
11963 }
11964 }
11965
11966 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11967 cam_color_correct_gains_t colorCorrectGains;
11968 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11969 colorCorrectGains.gains[i] =
11970 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11971 }
11972 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11973 colorCorrectGains)) {
11974 rc = BAD_VALUE;
11975 }
11976 }
11977
11978 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11979 cam_color_correct_matrix_t colorCorrectTransform;
11980 cam_rational_type_t transform_elem;
11981 size_t num = 0;
11982 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11983 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11984 transform_elem.numerator =
11985 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11986 transform_elem.denominator =
11987 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11988 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11989 num++;
11990 }
11991 }
11992 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11993 colorCorrectTransform)) {
11994 rc = BAD_VALUE;
11995 }
11996 }
11997
11998 cam_trigger_t aecTrigger;
11999 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12000 aecTrigger.trigger_id = -1;
12001 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12002 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12003 aecTrigger.trigger =
12004 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12005 aecTrigger.trigger_id =
12006 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12008 aecTrigger)) {
12009 rc = BAD_VALUE;
12010 }
12011 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12012 aecTrigger.trigger, aecTrigger.trigger_id);
12013 }
12014
12015 /*af_trigger must come with a trigger id*/
12016 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12017 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12018 cam_trigger_t af_trigger;
12019 af_trigger.trigger =
12020 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12021 af_trigger.trigger_id =
12022 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12024 rc = BAD_VALUE;
12025 }
12026 LOGD("AfTrigger: %d AfTriggerID: %d",
12027 af_trigger.trigger, af_trigger.trigger_id);
12028 }
12029
12030 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12031 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12033 rc = BAD_VALUE;
12034 }
12035 }
12036 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12037 cam_edge_application_t edge_application;
12038 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012039
Thierry Strudel3d639192016-09-09 11:52:26 -070012040 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12041 edge_application.sharpness = 0;
12042 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012043 edge_application.sharpness =
12044 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12045 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12046 int32_t sharpness =
12047 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12048 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12049 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12050 LOGD("Setting edge mode sharpness %d", sharpness);
12051 edge_application.sharpness = sharpness;
12052 }
12053 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012054 }
12055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12056 rc = BAD_VALUE;
12057 }
12058 }
12059
12060 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12061 int32_t respectFlashMode = 1;
12062 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12063 uint8_t fwk_aeMode =
12064 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012065 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12066 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12067 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012068 respectFlashMode = 0;
12069 LOGH("AE Mode controls flash, ignore android.flash.mode");
12070 }
12071 }
12072 if (respectFlashMode) {
12073 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12074 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12075 LOGH("flash mode after mapping %d", val);
12076 // To check: CAM_INTF_META_FLASH_MODE usage
12077 if (NAME_NOT_FOUND != val) {
12078 uint8_t flashMode = (uint8_t)val;
12079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12080 rc = BAD_VALUE;
12081 }
12082 }
12083 }
12084 }
12085
12086 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12087 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12089 rc = BAD_VALUE;
12090 }
12091 }
12092
12093 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12094 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12096 flashFiringTime)) {
12097 rc = BAD_VALUE;
12098 }
12099 }
12100
12101 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12102 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12103 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12104 hotPixelMode)) {
12105 rc = BAD_VALUE;
12106 }
12107 }
12108
12109 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12110 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12112 lensAperture)) {
12113 rc = BAD_VALUE;
12114 }
12115 }
12116
12117 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12118 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12120 filterDensity)) {
12121 rc = BAD_VALUE;
12122 }
12123 }
12124
12125 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12126 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12127 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12128 focalLength)) {
12129 rc = BAD_VALUE;
12130 }
12131 }
12132
12133 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12134 uint8_t optStabMode =
12135 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12136 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12137 optStabMode)) {
12138 rc = BAD_VALUE;
12139 }
12140 }
12141
12142 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12143 uint8_t videoStabMode =
12144 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12145 LOGD("videoStabMode from APP = %d", videoStabMode);
12146 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12147 videoStabMode)) {
12148 rc = BAD_VALUE;
12149 }
12150 }
12151
12152
12153 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12154 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12155 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12156 noiseRedMode)) {
12157 rc = BAD_VALUE;
12158 }
12159 }
12160
12161 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12162 float reprocessEffectiveExposureFactor =
12163 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12165 reprocessEffectiveExposureFactor)) {
12166 rc = BAD_VALUE;
12167 }
12168 }
12169
12170 cam_crop_region_t scalerCropRegion;
12171 bool scalerCropSet = false;
12172 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12173 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12174 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12175 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12176 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12177
12178 // Map coordinate system from active array to sensor output.
12179 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12180 scalerCropRegion.width, scalerCropRegion.height);
12181
12182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12183 scalerCropRegion)) {
12184 rc = BAD_VALUE;
12185 }
12186 scalerCropSet = true;
12187 }
12188
12189 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12190 int64_t sensorExpTime =
12191 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12192 LOGD("setting sensorExpTime %lld", sensorExpTime);
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12194 sensorExpTime)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12200 int64_t sensorFrameDuration =
12201 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012202 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12203 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12204 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12205 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12207 sensorFrameDuration)) {
12208 rc = BAD_VALUE;
12209 }
12210 }
12211
12212 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12213 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12214 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12215 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12216 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12217 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12218 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12220 sensorSensitivity)) {
12221 rc = BAD_VALUE;
12222 }
12223 }
12224
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012225#ifndef USE_HAL_3_3
12226 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12227 int32_t ispSensitivity =
12228 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12229 if (ispSensitivity <
12230 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12231 ispSensitivity =
12232 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12233 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12234 }
12235 if (ispSensitivity >
12236 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12237 ispSensitivity =
12238 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12239 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12240 }
12241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12242 ispSensitivity)) {
12243 rc = BAD_VALUE;
12244 }
12245 }
12246#endif
12247
Thierry Strudel3d639192016-09-09 11:52:26 -070012248 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12249 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12250 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12251 rc = BAD_VALUE;
12252 }
12253 }
12254
12255 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12256 uint8_t fwk_facedetectMode =
12257 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12258
12259 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12260 fwk_facedetectMode);
12261
12262 if (NAME_NOT_FOUND != val) {
12263 uint8_t facedetectMode = (uint8_t)val;
12264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12265 facedetectMode)) {
12266 rc = BAD_VALUE;
12267 }
12268 }
12269 }
12270
Thierry Strudel54dc9782017-02-15 12:12:10 -080012271 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012272 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012273 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12275 histogramMode)) {
12276 rc = BAD_VALUE;
12277 }
12278 }
12279
12280 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12281 uint8_t sharpnessMapMode =
12282 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12284 sharpnessMapMode)) {
12285 rc = BAD_VALUE;
12286 }
12287 }
12288
12289 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12290 uint8_t tonemapMode =
12291 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12293 rc = BAD_VALUE;
12294 }
12295 }
12296 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12297 /*All tonemap channels will have the same number of points*/
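    /* Each framework curve is a flat array of (Pin, Pout) control-point pairs,
     * so the per-channel point count below is the entry count divided by 2. */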
12298 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12299 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12300 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12301 cam_rgb_tonemap_curves tonemapCurves;
12302 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12303 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12304 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12305 tonemapCurves.tonemap_points_cnt,
12306 CAM_MAX_TONEMAP_CURVE_SIZE);
12307 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12308 }
12309
12310 /* ch0 = G*/
12311 size_t point = 0;
12312 cam_tonemap_curve_t tonemapCurveGreen;
12313 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12314 for (size_t j = 0; j < 2; j++) {
12315 tonemapCurveGreen.tonemap_points[i][j] =
12316 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12317 point++;
12318 }
12319 }
12320 tonemapCurves.curves[0] = tonemapCurveGreen;
12321
12322 /* ch 1 = B */
12323 point = 0;
12324 cam_tonemap_curve_t tonemapCurveBlue;
12325 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12326 for (size_t j = 0; j < 2; j++) {
12327 tonemapCurveBlue.tonemap_points[i][j] =
12328 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12329 point++;
12330 }
12331 }
12332 tonemapCurves.curves[1] = tonemapCurveBlue;
12333
12334 /* ch 2 = R */
12335 point = 0;
12336 cam_tonemap_curve_t tonemapCurveRed;
12337 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12338 for (size_t j = 0; j < 2; j++) {
12339 tonemapCurveRed.tonemap_points[i][j] =
12340 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12341 point++;
12342 }
12343 }
12344 tonemapCurves.curves[2] = tonemapCurveRed;
12345
12346 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12347 tonemapCurves)) {
12348 rc = BAD_VALUE;
12349 }
12350 }
12351
12352 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12353 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12355 captureIntent)) {
12356 rc = BAD_VALUE;
12357 }
12358 }
12359
12360 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12361 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12362 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12363 blackLevelLock)) {
12364 rc = BAD_VALUE;
12365 }
12366 }
12367
12368 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12369 uint8_t lensShadingMapMode =
12370 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12372 lensShadingMapMode)) {
12373 rc = BAD_VALUE;
12374 }
12375 }
12376
12377 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12378 cam_area_t roi;
12379 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012380 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012381
12382 // Map coordinate system from active array to sensor output.
12383 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12384 roi.rect.height);
12385
12386 if (scalerCropSet) {
12387 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12388 }
12389 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12390 rc = BAD_VALUE;
12391 }
12392 }
12393
12394 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12395 cam_area_t roi;
12396 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012397 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012398
12399 // Map coordinate system from active array to sensor output.
12400 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12401 roi.rect.height);
12402
12403 if (scalerCropSet) {
12404 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12405 }
12406 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12407 rc = BAD_VALUE;
12408 }
12409 }
12410
12411 // CDS for non-HFR non-video mode
12412 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12413 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12414 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12415 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12416 LOGE("Invalid CDS mode %d!", *fwk_cds);
12417 } else {
12418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12419 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12420 rc = BAD_VALUE;
12421 }
12422 }
12423 }
12424
Thierry Strudel04e026f2016-10-10 11:27:36 -070012425 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012426 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012427 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012428 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12429 }
12430 if (m_bVideoHdrEnabled)
12431 vhdr = CAM_VIDEO_HDR_MODE_ON;
12432
Thierry Strudel54dc9782017-02-15 12:12:10 -080012433 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12434
12435    if (vhdr != curr_hdr_state)
12436        LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12437
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012438 rc = setVideoHdrMode(mParameters, vhdr);
12439 if (rc != NO_ERROR) {
12440 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012441 }
12442
12443 //IR
12444 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12445 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12446 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012447 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12448 uint8_t isIRon = 0;
12449
12450        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012451 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12452 LOGE("Invalid IR mode %d!", fwk_ir);
12453 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012454            if (isIRon != curr_ir_state)
12455                LOGH("PROFILE_SET_IR_MODE %d", isIRon);
12456
Thierry Strudel04e026f2016-10-10 11:27:36 -070012457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12458 CAM_INTF_META_IR_MODE, fwk_ir)) {
12459 rc = BAD_VALUE;
12460 }
12461 }
12462 }
12463
Thierry Strudel54dc9782017-02-15 12:12:10 -080012464 //Binning Correction Mode
12465 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12466 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12467 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12468 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12469 || (0 > fwk_binning_correction)) {
12470 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12471 } else {
12472 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12473 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477 }
12478
Thierry Strudel269c81a2016-10-12 12:13:59 -070012479 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12480 float aec_speed;
12481 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12482 LOGD("AEC Speed :%f", aec_speed);
12483        if (aec_speed < 0) {
12484            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12485 } else {
12486 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12487 aec_speed)) {
12488 rc = BAD_VALUE;
12489 }
12490 }
12491 }
12492
12493 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12494 float awb_speed;
12495 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12496 LOGD("AWB Speed :%f", awb_speed);
12497        if (awb_speed < 0) {
12498            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12499 } else {
12500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12501 awb_speed)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505 }
12506
Thierry Strudel3d639192016-09-09 11:52:26 -070012507 // TNR
12508 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12509 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12510 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012511 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012512 cam_denoise_param_t tnr;
12513 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12514 tnr.process_plates =
12515 (cam_denoise_process_type_t)frame_settings.find(
12516 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12517 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012518
12519        if (b_TnrRequested != curr_tnr_state)
12520            LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12521
Thierry Strudel3d639192016-09-09 11:52:26 -070012522 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12523 rc = BAD_VALUE;
12524 }
12525 }
12526
Thierry Strudel54dc9782017-02-15 12:12:10 -080012527 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012528 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012529 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12531 *exposure_metering_mode)) {
12532 rc = BAD_VALUE;
12533 }
12534 }
12535
Thierry Strudel3d639192016-09-09 11:52:26 -070012536 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12537 int32_t fwk_testPatternMode =
12538 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12539 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12540 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12541
12542 if (NAME_NOT_FOUND != testPatternMode) {
12543 cam_test_pattern_data_t testPatternData;
12544 memset(&testPatternData, 0, sizeof(testPatternData));
12545 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12546 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12547 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12548 int32_t *fwk_testPatternData =
12549 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12550 testPatternData.r = fwk_testPatternData[0];
12551 testPatternData.b = fwk_testPatternData[3];
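                // fwk_testPatternData is ordered [R, G1, G2, B]; which green
                // sample maps to gr vs. gb depends on the sensor's Bayer (color
                // filter) arrangement handled in the switch below.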
12552 switch (gCamCapability[mCameraId]->color_arrangement) {
12553 case CAM_FILTER_ARRANGEMENT_RGGB:
12554 case CAM_FILTER_ARRANGEMENT_GRBG:
12555 testPatternData.gr = fwk_testPatternData[1];
12556 testPatternData.gb = fwk_testPatternData[2];
12557 break;
12558 case CAM_FILTER_ARRANGEMENT_GBRG:
12559 case CAM_FILTER_ARRANGEMENT_BGGR:
12560 testPatternData.gr = fwk_testPatternData[2];
12561 testPatternData.gb = fwk_testPatternData[1];
12562 break;
12563 default:
12564 LOGE("color arrangement %d is not supported",
12565 gCamCapability[mCameraId]->color_arrangement);
12566 break;
12567 }
12568 }
12569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12570 testPatternData)) {
12571 rc = BAD_VALUE;
12572 }
12573 } else {
12574 LOGE("Invalid framework sensor test pattern mode %d",
12575 fwk_testPatternMode);
12576 }
12577 }
12578
12579 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12580 size_t count = 0;
12581 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12582 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12583 gps_coords.data.d, gps_coords.count, count);
12584 if (gps_coords.count != count) {
12585 rc = BAD_VALUE;
12586 }
12587 }
12588
12589 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12590 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12591 size_t count = 0;
12592 const char *gps_methods_src = (const char *)
12593 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12594 memset(gps_methods, '\0', sizeof(gps_methods));
12595 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12596 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12597 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12598 if (GPS_PROCESSING_METHOD_SIZE != count) {
12599 rc = BAD_VALUE;
12600 }
12601 }
12602
12603 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12604 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12605 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12606 gps_timestamp)) {
12607 rc = BAD_VALUE;
12608 }
12609 }
12610
12611 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12612 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12613 cam_rotation_info_t rotation_info;
12614 if (orientation == 0) {
12615 rotation_info.rotation = ROTATE_0;
12616 } else if (orientation == 90) {
12617 rotation_info.rotation = ROTATE_90;
12618 } else if (orientation == 180) {
12619 rotation_info.rotation = ROTATE_180;
12620        } else if (orientation == 270) {
12621            rotation_info.rotation = ROTATE_270;
12622        } else {
            // Defensive default: JPEG orientation must be one of 0/90/180/270,
            // so avoid sending an uninitialized rotation to the backend.
            rotation_info.rotation = ROTATE_0;
        }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012623 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012624 rotation_info.streamId = snapshotStreamId;
12625 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12626 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12627 rc = BAD_VALUE;
12628 }
12629 }
12630
12631 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12632 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12634 rc = BAD_VALUE;
12635 }
12636 }
12637
12638 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12639 uint32_t thumb_quality = (uint32_t)
12640 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12641 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12642 thumb_quality)) {
12643 rc = BAD_VALUE;
12644 }
12645 }
12646
12647 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12648 cam_dimension_t dim;
12649 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12650 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12652 rc = BAD_VALUE;
12653 }
12654 }
12655
12656 // Internal metadata
12657 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12658 size_t count = 0;
12659 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12660 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12661 privatedata.data.i32, privatedata.count, count);
12662 if (privatedata.count != count) {
12663 rc = BAD_VALUE;
12664 }
12665 }
12666
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012667 // ISO/Exposure Priority
12668 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12669 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12670 cam_priority_mode_t mode =
12671 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12672 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12673 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12674 use_iso_exp_pty.previewOnly = FALSE;
12675 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12676 use_iso_exp_pty.value = *ptr;
12677
12678 if(CAM_ISO_PRIORITY == mode) {
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12680 use_iso_exp_pty)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684 else {
12685 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12686 use_iso_exp_pty)) {
12687 rc = BAD_VALUE;
12688 }
12689 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012690
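            // Backend ZSL mode is enabled while a manual ISO/exposure priority
            // is active, and disabled below when the priority tags are not present.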
12691 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12692 rc = BAD_VALUE;
12693 }
12694 }
12695 } else {
12696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12697 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012698 }
12699 }
12700
12701 // Saturation
12702 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12703 int32_t* use_saturation =
12704 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12706 rc = BAD_VALUE;
12707 }
12708 }
12709
Thierry Strudel3d639192016-09-09 11:52:26 -070012710 // EV step
12711 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12712 gCamCapability[mCameraId]->exp_compensation_step)) {
12713 rc = BAD_VALUE;
12714 }
12715
12716 // CDS info
12717 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12718 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12719 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12720
12721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12722 CAM_INTF_META_CDS_DATA, *cdsData)) {
12723 rc = BAD_VALUE;
12724 }
12725 }
12726
Shuzhen Wang19463d72016-03-08 11:09:52 -080012727 // Hybrid AE
12728 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12729 uint8_t *hybrid_ae = (uint8_t *)
12730 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12731
12732 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12733 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12734 rc = BAD_VALUE;
12735 }
12736 }
12737
Shuzhen Wang14415f52016-11-16 18:26:18 -080012738 // Histogram
12739 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12740 uint8_t histogramMode =
12741 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12742 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12743 histogramMode)) {
12744 rc = BAD_VALUE;
12745 }
12746 }
12747
12748 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12749 int32_t histogramBins =
12750 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12752 histogramBins)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012757 // Tracking AF
12758 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12759 uint8_t trackingAfTrigger =
12760 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12762 trackingAfTrigger)) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766
Thierry Strudel3d639192016-09-09 11:52:26 -070012767 return rc;
12768}
12769
12770/*===========================================================================
12771 * FUNCTION : captureResultCb
12772 *
12773 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12774 *
12775 * PARAMETERS :
12776 * @frame : frame information from mm-camera-interface
12777 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12778 * @userdata: userdata
12779 *
12780 * RETURN : NONE
12781 *==========================================================================*/
12782void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12783 camera3_stream_buffer_t *buffer,
12784 uint32_t frame_number, bool isInputBuffer, void *userdata)
12785{
12786 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12787 if (hw == NULL) {
12788 LOGE("Invalid hw %p", hw);
12789 return;
12790 }
12791
12792 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12793 return;
12794}
12795
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012796/*===========================================================================
12797 * FUNCTION : setBufferErrorStatus
12798 *
12799 * DESCRIPTION: Callback handler for channels to report any buffer errors
12800 *
12801 * PARAMETERS :
12802 * @ch : Channel on which buffer error is reported from
12803 * @frame_number : frame number on which buffer error is reported on
12804 * @buffer_status : buffer error status
12805 * @userdata: userdata
12806 *
12807 * RETURN : NONE
12808 *==========================================================================*/
12809void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12810 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12811{
12812 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12813 if (hw == NULL) {
12814 LOGE("Invalid hw %p", hw);
12815 return;
12816 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012817
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012818 hw->setBufferErrorStatus(ch, frame_number, err);
12819 return;
12820}
12821
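/*===========================================================================
 * FUNCTION   : setBufferErrorStatus
 *
 * DESCRIPTION: Mark all pending buffers of the given frame number that belong
 *              to the given channel with CAMERA3_BUFFER_STATUS_ERROR
 *
 * PARAMETERS :
 *   @ch          : Channel on which the buffer error is reported
 *   @frameNumber : frame number on which the buffer error is reported
 *   @err         : buffer error status
 *
 * RETURN     : NONE
 *==========================================================================*/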
12822void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12823 uint32_t frameNumber, camera3_buffer_status_t err)
12824{
12825 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12826 pthread_mutex_lock(&mMutex);
12827
12828 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12829 if (req.frame_number != frameNumber)
12830 continue;
12831 for (auto& k : req.mPendingBufferList) {
12832 if(k.stream->priv == ch) {
12833 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12834 }
12835 }
12836 }
12837
12838 pthread_mutex_unlock(&mMutex);
12839 return;
12840}
Thierry Strudel3d639192016-09-09 11:52:26 -070012841/*===========================================================================
12842 * FUNCTION : initialize
12843 *
12844 * DESCRIPTION: Pass framework callback pointers to HAL
12845 *
12846 * PARAMETERS :
12847 *   @device       : camera3 device handle
12848 *   @callback_ops : framework callback function pointers
12849 * RETURN : Success : 0
12850 * Failure: -ENODEV
12851 *==========================================================================*/
12852
12853int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12854 const camera3_callback_ops_t *callback_ops)
12855{
12856 LOGD("E");
12857 QCamera3HardwareInterface *hw =
12858 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12859 if (!hw) {
12860 LOGE("NULL camera device");
12861 return -ENODEV;
12862 }
12863
12864 int rc = hw->initialize(callback_ops);
12865 LOGD("X");
12866 return rc;
12867}
12868
12869/*===========================================================================
12870 * FUNCTION : configure_streams
12871 *
12872 * DESCRIPTION: Configure the streams requested by the camera framework
12873 *
12874 * PARAMETERS :
12875 *   @device      : camera3 device handle
12876 *   @stream_list : stream configuration requested by the framework
12877 * RETURN : Success: 0
12878 * Failure: -EINVAL (if stream configuration is invalid)
12879 * -ENODEV (fatal error)
12880 *==========================================================================*/
12881
12882int QCamera3HardwareInterface::configure_streams(
12883 const struct camera3_device *device,
12884 camera3_stream_configuration_t *stream_list)
12885{
12886 LOGD("E");
12887 QCamera3HardwareInterface *hw =
12888 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12889 if (!hw) {
12890 LOGE("NULL camera device");
12891 return -ENODEV;
12892 }
12893 int rc = hw->configureStreams(stream_list);
12894 LOGD("X");
12895 return rc;
12896}
12897
12898/*===========================================================================
12899 * FUNCTION : construct_default_request_settings
12900 *
12901 * DESCRIPTION: Configure a settings buffer to meet the required use case
12902 *
12903 * PARAMETERS :
12904 *   @device : camera3 device handle
12905 *   @type   : request template type
12906 * RETURN : Success: Return valid metadata
12907 * Failure: Return NULL
12908 *==========================================================================*/
12909const camera_metadata_t* QCamera3HardwareInterface::
12910 construct_default_request_settings(const struct camera3_device *device,
12911 int type)
12912{
12913
12914 LOGD("E");
12915 camera_metadata_t* fwk_metadata = NULL;
12916 QCamera3HardwareInterface *hw =
12917 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12918 if (!hw) {
12919 LOGE("NULL camera device");
12920 return NULL;
12921 }
12922
12923 fwk_metadata = hw->translateCapabilityToMetadata(type);
12924
12925 LOGD("X");
12926 return fwk_metadata;
12927}
12928
12929/*===========================================================================
12930 * FUNCTION : process_capture_request
12931 *
12932 * DESCRIPTION: Submit a capture request from the framework to the HAL
12933 *
12934 * PARAMETERS :
12935 *   @device  : camera3 device handle
12936 *   @request : capture request to process
12937 * RETURN     : Success: 0, Failure: negative error code
12938 *==========================================================================*/
12939int QCamera3HardwareInterface::process_capture_request(
12940 const struct camera3_device *device,
12941 camera3_capture_request_t *request)
12942{
12943 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012944 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012945 QCamera3HardwareInterface *hw =
12946 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12947 if (!hw) {
12948 LOGE("NULL camera device");
12949 return -EINVAL;
12950 }
12951
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012952 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012953 LOGD("X");
12954 return rc;
12955}
12956
12957/*===========================================================================
12958 * FUNCTION : dump
12959 *
12960 * DESCRIPTION: Dump HAL debug state to the given file descriptor
12961 *
12962 * PARAMETERS :
12963 *   @device : camera3 device handle
12964 *   @fd     : file descriptor to dump into
12965 * RETURN     : NONE
12966 *==========================================================================*/
12967
12968void QCamera3HardwareInterface::dump(
12969 const struct camera3_device *device, int fd)
12970{
12971 /* Log level property is read when "adb shell dumpsys media.camera" is
12972 called so that the log level can be controlled without restarting
12973 the media server */
12974 getLogLevel();
12975
12976 LOGD("E");
12977 QCamera3HardwareInterface *hw =
12978 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12979 if (!hw) {
12980 LOGE("NULL camera device");
12981 return;
12982 }
12983
12984 hw->dump(fd);
12985 LOGD("X");
12986 return;
12987}
12988
12989/*===========================================================================
12990 * FUNCTION : flush
12991 *
12992 * DESCRIPTION: Flush in-flight requests and return all pending buffers
12993 *
12994 * PARAMETERS :
12995 *   @device : camera3 device handle
12996 *
12997 * RETURN     : Success: 0, Failure: negative error code
12998 *==========================================================================*/
12999
13000int QCamera3HardwareInterface::flush(
13001 const struct camera3_device *device)
13002{
13003 int rc;
13004 LOGD("E");
13005 QCamera3HardwareInterface *hw =
13006 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13007 if (!hw) {
13008 LOGE("NULL camera device");
13009 return -EINVAL;
13010 }
13011
13012 pthread_mutex_lock(&hw->mMutex);
13013 // Validate current state
13014 switch (hw->mState) {
13015 case STARTED:
13016 /* valid state */
13017 break;
13018
13019 case ERROR:
13020 pthread_mutex_unlock(&hw->mMutex);
13021 hw->handleCameraDeviceError();
13022 return -ENODEV;
13023
13024 default:
13025 LOGI("Flush returned during state %d", hw->mState);
13026 pthread_mutex_unlock(&hw->mMutex);
13027 return 0;
13028 }
13029 pthread_mutex_unlock(&hw->mMutex);
13030
13031 rc = hw->flush(true /* restart channels */ );
13032 LOGD("X");
13033 return rc;
13034}
13035
13036/*===========================================================================
13037 * FUNCTION : close_camera_device
13038 *
13039 * DESCRIPTION: Close the camera device and release the HAL instance
13040 *
13041 * PARAMETERS :
13042 *   @device : hw device handle of the camera to close
13043 *
13044 * RETURN     : Success: NO_ERROR, Failure: BAD_VALUE
13045 *==========================================================================*/
13046int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13047{
13048 int ret = NO_ERROR;
13049 QCamera3HardwareInterface *hw =
13050 reinterpret_cast<QCamera3HardwareInterface *>(
13051 reinterpret_cast<camera3_device_t *>(device)->priv);
13052 if (!hw) {
13053 LOGE("NULL camera device");
13054 return BAD_VALUE;
13055 }
13056
13057 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13058 delete hw;
13059 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013060 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013061 return ret;
13062}
13063
13064/*===========================================================================
13065 * FUNCTION : getWaveletDenoiseProcessPlate
13066 *
13067 * DESCRIPTION: query wavelet denoise process plate
13068 *
13069 * PARAMETERS : None
13070 *
13071 * RETURN     : WNR process plate value
13072 *==========================================================================*/
13073cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13074{
13075 char prop[PROPERTY_VALUE_MAX];
13076 memset(prop, 0, sizeof(prop));
13077 property_get("persist.denoise.process.plates", prop, "0");
13078 int processPlate = atoi(prop);
13079 switch(processPlate) {
13080 case 0:
13081 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13082 case 1:
13083 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13084 case 2:
13085 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13086 case 3:
13087 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13088 default:
13089 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13090 }
13091}
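/*
 * Illustrative usage (not part of the HAL API): the process plate choice can
 * be overridden at runtime through the system property read above, e.g.
 *     adb shell setprop persist.denoise.process.plates 2
 * selects CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; any value outside 0-3 falls
 * back to the same default. getTemporalDenoiseProcessPlate() below applies
 * the identical mapping to persist.tnr.process.plates for TNR.
 */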
13092
13093
13094/*===========================================================================
13095 * FUNCTION : getTemporalDenoiseProcessPlate
13096 *
13097 * DESCRIPTION: query temporal denoise process plate
13098 *
13099 * PARAMETERS : None
13100 *
13101 * RETURN : TNR process plate value
13102 *==========================================================================*/
13103cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13104{
13105 char prop[PROPERTY_VALUE_MAX];
13106 memset(prop, 0, sizeof(prop));
13107 property_get("persist.tnr.process.plates", prop, "0");
13108 int processPlate = atoi(prop);
13109 switch(processPlate) {
13110 case 0:
13111 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13112 case 1:
13113 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13114 case 2:
13115 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13116 case 3:
13117 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13118 default:
13119 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13120 }
13121}
13122
13123
13124/*===========================================================================
13125 * FUNCTION : extractSceneMode
13126 *
13127 * DESCRIPTION: Extract scene mode from framework-set metadata
13128 *
13129 * PARAMETERS :
13130 * @frame_settings: CameraMetadata reference
13131 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13132 * @hal_metadata: hal metadata structure
13133 *
13134 * RETURN : int32_t status (NO_ERROR on success, non-zero failure code)
13135 *==========================================================================*/
13136int32_t QCamera3HardwareInterface::extractSceneMode(
13137 const CameraMetadata &frame_settings, uint8_t metaMode,
13138 metadata_buffer_t *hal_metadata)
13139{
13140 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013141 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13142
13143 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13144 LOGD("Ignoring control mode OFF_KEEP_STATE");
13145 return NO_ERROR;
13146 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013147
13148 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13149 camera_metadata_ro_entry entry =
13150 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13151 if (0 == entry.count)
13152 return rc;
13153
13154 uint8_t fwk_sceneMode = entry.data.u8[0];
13155
13156 int val = lookupHalName(SCENE_MODES_MAP,
13157 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13158 fwk_sceneMode);
13159 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013160 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013161 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013162 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013163 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013164
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013165 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13166 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13167 }
13168
13169 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13170 if (sceneMode == CAM_SCENE_MODE_HDR) { // sceneMode holds a CAM_SCENE_MODE_* value mapped via SCENE_MODES_MAP
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013171 cam_hdr_param_t hdr_params;
13172 hdr_params.hdr_enable = 1;
13173 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13174 hdr_params.hdr_need_1x = false;
13175 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13176 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13177 rc = BAD_VALUE;
13178 }
13179 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013180
Thierry Strudel3d639192016-09-09 11:52:26 -070013181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13182 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13183 rc = BAD_VALUE;
13184 }
13185 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013186
13187 if (mForceHdrSnapshot) {
13188 cam_hdr_param_t hdr_params;
13189 hdr_params.hdr_enable = 1;
13190 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13191 hdr_params.hdr_need_1x = false;
13192 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13193 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13194 rc = BAD_VALUE;
13195 }
13196 }
13197
Thierry Strudel3d639192016-09-09 11:52:26 -070013198 return rc;
13199}
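/*
 * Descriptive note: when the framework submits a request with
 * ANDROID_CONTROL_MODE_USE_SCENE_MODE and ANDROID_CONTROL_SCENE_MODE_HDR, the
 * mode is mapped through SCENE_MODES_MAP to CAM_SCENE_MODE_HDR, sensor HDR is
 * evaluated via setSensorHDR(), and, if sensor HDR is not engaged, multi-frame
 * HDR bracketing (CAM_INTF_PARM_HAL_BRACKETING_HDR) plus the best-shot mode
 * (CAM_INTF_PARM_BESTSHOT_MODE) are written into hal_metadata.
 */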
13200
13201/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013202 * FUNCTION : setVideoHdrMode
13203 *
13204 * DESCRIPTION: Set video HDR mode from framework-set metadata
13205 *
13206 * PARAMETERS :
13207 * @hal_metadata: hal metadata structure
13208 * @vhdr: requested video HDR mode (cam_video_hdr_mode_t)
13209 *
13210 * RETURN : int32_t status (NO_ERROR on success, BAD_VALUE on invalid mode)
13211 *==========================================================================*/
13212int32_t QCamera3HardwareInterface::setVideoHdrMode(
13213 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13214{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013215 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13216 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13217 }
13218
13219 LOGE("Invalid Video HDR mode %d!", vhdr);
13220 return BAD_VALUE;
13221}
13222
13223/*===========================================================================
13224 * FUNCTION : setSensorHDR
13225 *
13226 * DESCRIPTION: Enable/disable sensor HDR.
13227 *
13228 * PARAMETERS :
13229 * @hal_metadata: hal metadata structure
13230 * @enable: boolean whether to enable/disable sensor HDR
13231 * @isVideoHdrEnable: true when called from the video HDR path
13232 * RETURN : int32_t status (NO_ERROR on success, BAD_VALUE on failure)
13233 *==========================================================================*/
13234int32_t QCamera3HardwareInterface::setSensorHDR(
13235 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13236{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013237 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013238 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13239
13240 if (enable) {
13241 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13242 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13243 #ifdef _LE_CAMERA_
13244 //Default to staggered HDR for IOT
13245 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13246 #else
13247 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13248 #endif
13249 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13250 }
13251
13252 bool isSupported = false;
13253 switch (sensor_hdr) {
13254 case CAM_SENSOR_HDR_IN_SENSOR:
13255 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13256 CAM_QCOM_FEATURE_SENSOR_HDR) {
13257 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013258 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013259 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013260 break;
13261 case CAM_SENSOR_HDR_ZIGZAG:
13262 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13263 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13264 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013265 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013266 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013267 break;
13268 case CAM_SENSOR_HDR_STAGGERED:
13269 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13270 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13271 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013272 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013273 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013274 break;
13275 case CAM_SENSOR_HDR_OFF:
13276 isSupported = true;
13277 LOGD("Turning off sensor HDR");
13278 break;
13279 default:
13280 LOGE("HDR mode %d not supported", sensor_hdr);
13281 rc = BAD_VALUE;
13282 break;
13283 }
13284
13285 if(isSupported) {
13286 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13287 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13288 rc = BAD_VALUE;
13289 } else {
13290 if(!isVideoHdrEnable)
13291 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013292 }
13293 }
13294 return rc;
13295}
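/*
 * Note on persist.camera.sensor.hdr (assumed enum mapping): the property value
 * is cast directly to cam_sensor_hdr_type_t, and the _LE_CAMERA_ default of
 * "3" above selects staggered HDR for IoT builds; the remaining numeric values
 * presumably follow the cam_types.h enum order and are not spelled out here.
 */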
13296
13297/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013298 * FUNCTION : needRotationReprocess
13299 *
13300 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13301 *
13302 * PARAMETERS : none
13303 *
13304 * RETURN : true: needed
13305 * false: no need
13306 *==========================================================================*/
13307bool QCamera3HardwareInterface::needRotationReprocess()
13308{
13309 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13310 // current rotation is not zero, and pp has the capability to process rotation
13311 LOGH("need do reprocess for rotation");
13312 return true;
13313 }
13314
13315 return false;
13316}
13317
13318/*===========================================================================
13319 * FUNCTION : needReprocess
13320 *
13321 * DESCRIPTION: if reprocess is needed
13322 *
13323 * PARAMETERS : none
13324 *
13325 * RETURN : true: needed
13326 * false: no need
13327 *==========================================================================*/
13328bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13329{
13330 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13331 // TODO: add for ZSL HDR later
13332 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13333 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13334 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13335 return true;
13336 } else {
13337 LOGH("already post processed frame");
13338 return false;
13339 }
13340 }
13341 return needRotationReprocess();
13342}
13343
13344/*===========================================================================
13345 * FUNCTION : needJpegExifRotation
13346 *
13347 * DESCRIPTION: if rotation needs to be handled via JPEG EXIF
13348 *
13349 * PARAMETERS : none
13350 *
13351 * RETURN : true: needed
13352 * false: no need
13353 *==========================================================================*/
13354bool QCamera3HardwareInterface::needJpegExifRotation()
13355{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013356 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013357 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13358 LOGD("Need use Jpeg EXIF Rotation");
13359 return true;
13360 }
13361 return false;
13362}
13363
13364/*===========================================================================
13365 * FUNCTION : addOfflineReprocChannel
13366 *
13367 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13368 * coming from input channel
13369 *
13370 * PARAMETERS :
13371 * @config : reprocess configuration
13372 * @inputChHandle : pointer to the input (source) channel
13373 *
13374 *
13375 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13376 *==========================================================================*/
13377QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13378 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13379{
13380 int32_t rc = NO_ERROR;
13381 QCamera3ReprocessChannel *pChannel = NULL;
13382
13383 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013384 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13385 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013386 if (NULL == pChannel) {
13387 LOGE("no mem for reprocess channel");
13388 return NULL;
13389 }
13390
13391 rc = pChannel->initialize(IS_TYPE_NONE);
13392 if (rc != NO_ERROR) {
13393 LOGE("init reprocess channel failed, ret = %d", rc);
13394 delete pChannel;
13395 return NULL;
13396 }
13397
13398 // pp feature config
13399 cam_pp_feature_config_t pp_config;
13400 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13401
13402 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13403 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13404 & CAM_QCOM_FEATURE_DSDN) {
13405 //Use CPP CDS incase h/w supports it.
13406 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13407 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13408 }
13409 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13410 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13411 }
13412
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013413 if (config.hdr_param.hdr_enable) {
13414 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13415 pp_config.hdr_param = config.hdr_param;
13416 }
13417
13418 if (mForceHdrSnapshot) {
13419 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13420 pp_config.hdr_param.hdr_enable = 1;
13421 pp_config.hdr_param.hdr_need_1x = 0;
13422 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13423 }
13424
Thierry Strudel3d639192016-09-09 11:52:26 -070013425 rc = pChannel->addReprocStreamsFromSource(pp_config,
13426 config,
13427 IS_TYPE_NONE,
13428 mMetadataChannel);
13429
13430 if (rc != NO_ERROR) {
13431 delete pChannel;
13432 return NULL;
13433 }
13434 return pChannel;
13435}
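/*
 * Descriptive note: the offline reprocess channel starts from the
 * CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 mask and then adjusts it from capability
 * bits (DSDN replaces CDS when supported, rotation is dropped when the CPP
 * cannot rotate) and from the HDR parameters carried in the reprocess config
 * or forced via mForceHdrSnapshot, before adding streams from the source
 * channel.
 */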
13436
13437/*===========================================================================
13438 * FUNCTION : getMobicatMask
13439 *
13440 * DESCRIPTION: returns mobicat mask
13441 *
13442 * PARAMETERS : none
13443 *
13444 * RETURN : mobicat mask
13445 *
13446 *==========================================================================*/
13447uint8_t QCamera3HardwareInterface::getMobicatMask()
13448{
13449 return m_MobicatMask;
13450}
13451
13452/*===========================================================================
13453 * FUNCTION : setMobicat
13454 *
13455 * DESCRIPTION: set Mobicat on/off.
13456 *
13457 * PARAMETERS :
13458 * @params : none
13459 *
13460 * RETURN : int32_t type of status
13461 * NO_ERROR -- success
13462 * non-zero failure code
13463 *==========================================================================*/
13464int32_t QCamera3HardwareInterface::setMobicat()
13465{
13466 char value [PROPERTY_VALUE_MAX];
13467 property_get("persist.camera.mobicat", value, "0");
13468 int32_t ret = NO_ERROR;
13469 uint8_t enableMobi = (uint8_t)atoi(value);
13470
13471 if (enableMobi) {
13472 tune_cmd_t tune_cmd;
13473 tune_cmd.type = SET_RELOAD_CHROMATIX;
13474 tune_cmd.module = MODULE_ALL;
13475 tune_cmd.value = TRUE;
13476 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13477 CAM_INTF_PARM_SET_VFE_COMMAND,
13478 tune_cmd);
13479
13480 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13481 CAM_INTF_PARM_SET_PP_COMMAND,
13482 tune_cmd);
13483 }
13484 m_MobicatMask = enableMobi;
13485
13486 return ret;
13487}
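/*
 * Illustrative usage (assumes the standard Android property mechanism):
 *     adb shell setprop persist.camera.mobicat 1
 * enables Mobicat tuning metadata; the property is only read when setMobicat()
 * runs, so a change typically needs a new camera session to take effect.
 */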
13488
13489/*===========================================================================
13490* FUNCTION : getLogLevel
13491*
13492* DESCRIPTION: Reads the log level property into a variable
13493*
13494* PARAMETERS :
13495* None
13496*
13497* RETURN :
13498* None
13499*==========================================================================*/
13500void QCamera3HardwareInterface::getLogLevel()
13501{
13502 char prop[PROPERTY_VALUE_MAX];
13503 uint32_t globalLogLevel = 0;
13504
13505 property_get("persist.camera.hal.debug", prop, "0");
13506 int val = atoi(prop);
13507 if (0 <= val) {
13508 gCamHal3LogLevel = (uint32_t)val;
13509 }
13510
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013511 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013512 gKpiDebugLevel = atoi(prop);
13513
13514 property_get("persist.camera.global.debug", prop, "0");
13515 val = atoi(prop);
13516 if (0 <= val) {
13517 globalLogLevel = (uint32_t)val;
13518 }
13519
13520 /* Highest log level among hal.logs and global.logs is selected */
13521 if (gCamHal3LogLevel < globalLogLevel)
13522 gCamHal3LogLevel = globalLogLevel;
13523
13524 return;
13525}
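/*
 * Illustrative usage: the effective HAL log level is the maximum of
 * persist.camera.hal.debug and persist.camera.global.debug, e.g.
 *     adb shell setprop persist.camera.hal.debug 4
 *     adb shell dumpsys media.camera
 * The dumpsys call invokes dump(), which re-reads the properties without
 * restarting the media server (see the comment in dump() above).
 */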
13526
13527/*===========================================================================
13528 * FUNCTION : validateStreamRotations
13529 *
13530 * DESCRIPTION: Check if the rotations requested are supported
13531 *
13532 * PARAMETERS :
13533 * @stream_list : streams to be configured
13534 *
13535 * RETURN : NO_ERROR on success
13536 * -EINVAL on failure
13537 *
13538 *==========================================================================*/
13539int QCamera3HardwareInterface::validateStreamRotations(
13540 camera3_stream_configuration_t *streamList)
13541{
13542 int rc = NO_ERROR;
13543
13544 /*
13545 * Loop through all streams requested in configuration
13546 * Check if unsupported rotations have been requested on any of them
13547 */
13548 for (size_t j = 0; j < streamList->num_streams; j++){
13549 camera3_stream_t *newStream = streamList->streams[j];
13550
13551 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13552 bool isImplDef = (newStream->format ==
13553 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13554 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13555 isImplDef);
13556
13557 if (isRotated && (!isImplDef || isZsl)) {
13558 LOGE("Error: Unsupported rotation of %d requested for stream"
13559 "type:%d and stream format:%d",
13560 newStream->rotation, newStream->stream_type,
13561 newStream->format);
13562 rc = -EINVAL;
13563 break;
13564 }
13565 }
13566
13567 return rc;
13568}
13569
13570/*===========================================================================
13571* FUNCTION : getFlashInfo
13572*
13573* DESCRIPTION: Retrieve information about whether the device has a flash.
13574*
13575* PARAMETERS :
13576* @cameraId : Camera id to query
13577* @hasFlash : Boolean indicating whether there is a flash device
13578* associated with given camera
13579* @flashNode : If a flash device exists, this will be its device node.
13580*
13581* RETURN :
13582* None
13583*==========================================================================*/
13584void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13585 bool& hasFlash,
13586 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13587{
13588 cam_capability_t* camCapability = gCamCapability[cameraId];
13589 if (NULL == camCapability) {
13590 hasFlash = false;
13591 flashNode[0] = '\0';
13592 } else {
13593 hasFlash = camCapability->flash_available;
13594 strlcpy(flashNode,
13595 (char*)camCapability->flash_dev_name,
13596 QCAMERA_MAX_FILEPATH_LENGTH);
13597 }
13598}
13599
13600/*===========================================================================
13601* FUNCTION : getEepromVersionInfo
13602*
13603* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13604*
13605* PARAMETERS : None
13606*
13607* RETURN : string describing EEPROM version
13608* "\0" if no such info available
13609*==========================================================================*/
13610const char *QCamera3HardwareInterface::getEepromVersionInfo()
13611{
13612 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13613}
13614
13615/*===========================================================================
13616* FUNCTION : getLdafCalib
13617*
13618* DESCRIPTION: Retrieve Laser AF calibration data
13619*
13620* PARAMETERS : None
13621*
13622* RETURN : Two uint32_t describing laser AF calibration data
13623* NULL if none is available.
13624*==========================================================================*/
13625const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13626{
13627 if (mLdafCalibExist) {
13628 return &mLdafCalib[0];
13629 } else {
13630 return NULL;
13631 }
13632}
13633
13634/*===========================================================================
13635 * FUNCTION : dynamicUpdateMetaStreamInfo
13636 *
13637 * DESCRIPTION: This function:
13638 * (1) stops all the channels
13639 * (2) returns error on pending requests and buffers
13640 * (3) sends metastream_info in setparams
13641 * (4) starts all channels
13642 * This is useful when sensor has to be restarted to apply any
13643 * settings such as frame rate from a different sensor mode
13644 *
13645 * PARAMETERS : None
13646 *
13647 * RETURN : NO_ERROR on success
13648 * Error codes on failure
13649 *
13650 *==========================================================================*/
13651int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13652{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013653 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013654 int rc = NO_ERROR;
13655
13656 LOGD("E");
13657
13658 rc = stopAllChannels();
13659 if (rc < 0) {
13660 LOGE("stopAllChannels failed");
13661 return rc;
13662 }
13663
13664 rc = notifyErrorForPendingRequests();
13665 if (rc < 0) {
13666 LOGE("notifyErrorForPendingRequests failed");
13667 return rc;
13668 }
13669
13670 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13671 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13672 "Format:%d",
13673 mStreamConfigInfo.type[i],
13674 mStreamConfigInfo.stream_sizes[i].width,
13675 mStreamConfigInfo.stream_sizes[i].height,
13676 mStreamConfigInfo.postprocess_mask[i],
13677 mStreamConfigInfo.format[i]);
13678 }
13679
13680 /* Send meta stream info once again so that ISP can start */
13681 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13682 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13683 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13684 mParameters);
13685 if (rc < 0) {
13686 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13687 }
13688
13689 rc = startAllChannels();
13690 if (rc < 0) {
13691 LOGE("startAllChannels failed");
13692 return rc;
13693 }
13694
13695 LOGD("X");
13696 return rc;
13697}
13698
13699/*===========================================================================
13700 * FUNCTION : stopAllChannels
13701 *
13702 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13703 *
13704 * PARAMETERS : None
13705 *
13706 * RETURN : NO_ERROR on success
13707 * Error codes on failure
13708 *
13709 *==========================================================================*/
13710int32_t QCamera3HardwareInterface::stopAllChannels()
13711{
13712 int32_t rc = NO_ERROR;
13713
13714 LOGD("Stopping all channels");
13715 // Stop the Streams/Channels
13716 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13717 it != mStreamInfo.end(); it++) {
13718 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13719 if (channel) {
13720 channel->stop();
13721 }
13722 (*it)->status = INVALID;
13723 }
13724
13725 if (mSupportChannel) {
13726 mSupportChannel->stop();
13727 }
13728 if (mAnalysisChannel) {
13729 mAnalysisChannel->stop();
13730 }
13731 if (mRawDumpChannel) {
13732 mRawDumpChannel->stop();
13733 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013734 if (mHdrPlusRawSrcChannel) {
13735 mHdrPlusRawSrcChannel->stop();
13736 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013737 if (mMetadataChannel) {
13738 /* If content of mStreamInfo is not 0, there is metadata stream */
13739 mMetadataChannel->stop();
13740 }
13741
13742 LOGD("All channels stopped");
13743 return rc;
13744}
13745
13746/*===========================================================================
13747 * FUNCTION : startAllChannels
13748 *
13749 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13750 *
13751 * PARAMETERS : None
13752 *
13753 * RETURN : NO_ERROR on success
13754 * Error codes on failure
13755 *
13756 *==========================================================================*/
13757int32_t QCamera3HardwareInterface::startAllChannels()
13758{
13759 int32_t rc = NO_ERROR;
13760
13761 LOGD("Start all channels ");
13762 // Start the Streams/Channels
13763 if (mMetadataChannel) {
13764 /* If content of mStreamInfo is not 0, there is metadata stream */
13765 rc = mMetadataChannel->start();
13766 if (rc < 0) {
13767 LOGE("META channel start failed");
13768 return rc;
13769 }
13770 }
13771 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13772 it != mStreamInfo.end(); it++) {
13773 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13774 if (channel) {
13775 rc = channel->start();
13776 if (rc < 0) {
13777 LOGE("channel start failed");
13778 return rc;
13779 }
13780 }
13781 }
13782 if (mAnalysisChannel) {
13783 mAnalysisChannel->start();
13784 }
13785 if (mSupportChannel) {
13786 rc = mSupportChannel->start();
13787 if (rc < 0) {
13788 LOGE("Support channel start failed");
13789 return rc;
13790 }
13791 }
13792 if (mRawDumpChannel) {
13793 rc = mRawDumpChannel->start();
13794 if (rc < 0) {
13795 LOGE("RAW dump channel start failed");
13796 return rc;
13797 }
13798 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013799 if (mHdrPlusRawSrcChannel) {
13800 rc = mHdrPlusRawSrcChannel->start();
13801 if (rc < 0) {
13802 LOGE("HDR+ RAW channel start failed");
13803 return rc;
13804 }
13805 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013806
13807 LOGD("All channels started");
13808 return rc;
13809}
13810
13811/*===========================================================================
13812 * FUNCTION : notifyErrorForPendingRequests
13813 *
13814 * DESCRIPTION: This function sends error for all the pending requests/buffers
13815 *
13816 * PARAMETERS : None
13817 *
13818 * RETURN : Error codes
13819 * NO_ERROR on success
13820 *
13821 *==========================================================================*/
13822int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13823{
Emilian Peev7650c122017-01-19 08:24:33 -080013824 notifyErrorFoPendingDepthData(mDepthChannel);
13825
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013826 auto pendingRequest = mPendingRequestsList.begin();
13827 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013828
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013829 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13830 // buffers (for which buffers aren't sent yet).
13831 while (pendingRequest != mPendingRequestsList.end() ||
13832 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13833 if (pendingRequest == mPendingRequestsList.end() ||
13834 pendingBuffer->frame_number < pendingRequest->frame_number) {
13835 // If metadata for this frame was sent, notify about a buffer error and returns buffers
13836 // with error.
13837 for (auto &info : pendingBuffer->mPendingBufferList) {
13838 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013839 camera3_notify_msg_t notify_msg;
13840 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13841 notify_msg.type = CAMERA3_MSG_ERROR;
13842 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013843 notify_msg.message.error.error_stream = info.stream;
13844 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013845 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013846
13847 camera3_stream_buffer_t buffer = {};
13848 buffer.acquire_fence = -1;
13849 buffer.release_fence = -1;
13850 buffer.buffer = info.buffer;
13851 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13852 buffer.stream = info.stream;
13853 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013854 }
13855
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013856 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13857 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13858 pendingBuffer->frame_number > pendingRequest->frame_number) {
13859 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013860 camera3_notify_msg_t notify_msg;
13861 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13862 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013863 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13864 notify_msg.message.error.error_stream = nullptr;
13865 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013866 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013867
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013868 if (pendingRequest->input_buffer != nullptr) {
13869 camera3_capture_result result = {};
13870 result.frame_number = pendingRequest->frame_number;
13871 result.result = nullptr;
13872 result.input_buffer = pendingRequest->input_buffer;
13873 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013874 }
13875
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013876 mShutterDispatcher.clear(pendingRequest->frame_number);
13877 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13878 } else {
13879 // If both buffers and result metadata weren't sent yet, notify about a request error
13880 // and return buffers with error.
13881 for (auto &info : pendingBuffer->mPendingBufferList) {
13882 camera3_notify_msg_t notify_msg;
13883 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13884 notify_msg.type = CAMERA3_MSG_ERROR;
13885 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13886 notify_msg.message.error.error_stream = info.stream;
13887 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13888 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013889
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013890 camera3_stream_buffer_t buffer = {};
13891 buffer.acquire_fence = -1;
13892 buffer.release_fence = -1;
13893 buffer.buffer = info.buffer;
13894 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13895 buffer.stream = info.stream;
13896 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13897 }
13898
13899 if (pendingRequest->input_buffer != nullptr) {
13900 camera3_capture_result result = {};
13901 result.frame_number = pendingRequest->frame_number;
13902 result.result = nullptr;
13903 result.input_buffer = pendingRequest->input_buffer;
13904 orchestrateResult(&result);
13905 }
13906
13907 mShutterDispatcher.clear(pendingRequest->frame_number);
13908 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13909 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070013910 }
13911 }
13912
13913 /* Reset pending frame Drop list and requests list */
13914 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013915 mShutterDispatcher.clear();
13916 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070013917 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013918 LOGH("Cleared all the pending buffers ");
13919
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013920 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013921}
13922
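/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Whether a stream of the given dimensions must be placed on the
 *              encoder path (it exceeds the maximum viewfinder size or the
 *              4K video dimensions)
 *
 * PARAMETERS :
 * @max_viewfinder_size: maximum viewfinder dimensions
 * @width, @height: stream dimensions to check
 *
 * RETURN     : true if the stream belongs on the encoder path, false otherwise
 *==========================================================================*/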
13923bool QCamera3HardwareInterface::isOnEncoder(
13924 const cam_dimension_t max_viewfinder_size,
13925 uint32_t width, uint32_t height)
13926{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013927 return ((width > (uint32_t)max_viewfinder_size.width) ||
13928 (height > (uint32_t)max_viewfinder_size.height) ||
13929 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13930 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013931}
13932
13933/*===========================================================================
13934 * FUNCTION : setBundleInfo
13935 *
13936 * DESCRIPTION: Set bundle info for all streams that are bundle.
13937 *
13938 * PARAMETERS : None
13939 *
13940 * RETURN : NO_ERROR on success
13941 * Error codes on failure
13942 *==========================================================================*/
13943int32_t QCamera3HardwareInterface::setBundleInfo()
13944{
13945 int32_t rc = NO_ERROR;
13946
13947 if (mChannelHandle) {
13948 cam_bundle_config_t bundleInfo;
13949 memset(&bundleInfo, 0, sizeof(bundleInfo));
13950 rc = mCameraHandle->ops->get_bundle_info(
13951 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13952 if (rc != NO_ERROR) {
13953 LOGE("get_bundle_info failed");
13954 return rc;
13955 }
13956 if (mAnalysisChannel) {
13957 mAnalysisChannel->setBundleInfo(bundleInfo);
13958 }
13959 if (mSupportChannel) {
13960 mSupportChannel->setBundleInfo(bundleInfo);
13961 }
13962 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13963 it != mStreamInfo.end(); it++) {
13964 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13965 channel->setBundleInfo(bundleInfo);
13966 }
13967 if (mRawDumpChannel) {
13968 mRawDumpChannel->setBundleInfo(bundleInfo);
13969 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013970 if (mHdrPlusRawSrcChannel) {
13971 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13972 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013973 }
13974
13975 return rc;
13976}
13977
13978/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013979 * FUNCTION : setInstantAEC
13980 *
13981 * DESCRIPTION: Set Instant AEC related params.
13982 *
13983 * PARAMETERS :
13984 * @meta: CameraMetadata reference
13985 *
13986 * RETURN : NO_ERROR on success
13987 * Error codes on failure
13988 *==========================================================================*/
13989int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13990{
13991 int32_t rc = NO_ERROR;
13992 uint8_t val = 0;
13993 char prop[PROPERTY_VALUE_MAX];
13994
13995 // First try to configure instant AEC from framework metadata
13996 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13997 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13998 }
13999
14000 // If framework did not set this value, try to read from set prop.
14001 if (val == 0) {
14002 memset(prop, 0, sizeof(prop));
14003 property_get("persist.camera.instant.aec", prop, "0");
14004 val = (uint8_t)atoi(prop);
14005 }
14006
14007 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14008 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14009 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14010 mInstantAEC = val;
14011 mInstantAECSettledFrameNumber = 0;
14012 mInstantAecFrameIdxCount = 0;
14013 LOGH("instantAEC value set %d",val);
14014 if (mInstantAEC) {
14015 memset(prop, 0, sizeof(prop));
14016 property_get("persist.camera.ae.instant.bound", prop, "10");
14017 int32_t aec_frame_skip_cnt = atoi(prop);
14018 if (aec_frame_skip_cnt >= 0) {
14019 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14020 } else {
14021 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14022 rc = BAD_VALUE;
14023 }
14024 }
14025 } else {
14026 LOGE("Bad instant aec value set %d", val);
14027 rc = BAD_VALUE;
14028 }
14029 return rc;
14030}
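/*
 * Illustrative usage: instant AEC can also be forced from a shell for
 * bring-up, e.g.
 *     adb shell setprop persist.camera.instant.aec 1
 *     adb shell setprop persist.camera.ae.instant.bound 10
 * Framework metadata (QCAMERA3_INSTANT_AEC_MODE), when present and non-zero,
 * takes precedence over the property.
 */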
14031
14032/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014033 * FUNCTION : get_num_overall_buffers
14034 *
14035 * DESCRIPTION: Estimate number of pending buffers across all requests.
14036 *
14037 * PARAMETERS : None
14038 *
14039 * RETURN : Number of overall pending buffers
14040 *
14041 *==========================================================================*/
14042uint32_t PendingBuffersMap::get_num_overall_buffers()
14043{
14044 uint32_t sum_buffers = 0;
14045 for (auto &req : mPendingBuffersInRequest) {
14046 sum_buffers += req.mPendingBufferList.size();
14047 }
14048 return sum_buffers;
14049}
14050
14051/*===========================================================================
14052 * FUNCTION : removeBuf
14053 *
14054 * DESCRIPTION: Remove a matching buffer from tracker.
14055 *
14056 * PARAMETERS : @buffer: image buffer for the callback
14057 *
14058 * RETURN : None
14059 *
14060 *==========================================================================*/
14061void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14062{
14063 bool buffer_found = false;
14064 for (auto req = mPendingBuffersInRequest.begin();
14065 req != mPendingBuffersInRequest.end(); req++) {
14066 for (auto k = req->mPendingBufferList.begin();
14067 k != req->mPendingBufferList.end(); k++ ) {
14068 if (k->buffer == buffer) {
14069 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14070 req->frame_number, buffer);
14071 k = req->mPendingBufferList.erase(k);
14072 if (req->mPendingBufferList.empty()) {
14073 // Remove this request from Map
14074 req = mPendingBuffersInRequest.erase(req);
14075 }
14076 buffer_found = true;
14077 break;
14078 }
14079 }
14080 if (buffer_found) {
14081 break;
14082 }
14083 }
14084 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14085 get_num_overall_buffers());
14086}
14087
14088/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014089 * FUNCTION : getBufErrStatus
14090 *
14091 * DESCRIPTION: get buffer error status
14092 *
14093 * PARAMETERS : @buffer: buffer handle
14094 *
14095 * RETURN : Error status
14096 *
14097 *==========================================================================*/
14098int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14099{
14100 for (auto& req : mPendingBuffersInRequest) {
14101 for (auto& k : req.mPendingBufferList) {
14102 if (k.buffer == buffer)
14103 return k.bufStatus;
14104 }
14105 }
14106 return CAMERA3_BUFFER_STATUS_OK;
14107}
14108
14109/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014110 * FUNCTION : setPAAFSupport
14111 *
14112 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14113 * feature mask according to stream type and filter
14114 * arrangement
14115 *
14116 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14117 * @stream_type: stream type
14118 * @filter_arrangement: filter arrangement
14119 *
14120 * RETURN : None
14121 *==========================================================================*/
14122void QCamera3HardwareInterface::setPAAFSupport(
14123 cam_feature_mask_t& feature_mask,
14124 cam_stream_type_t stream_type,
14125 cam_color_filter_arrangement_t filter_arrangement)
14126{
Thierry Strudel3d639192016-09-09 11:52:26 -070014127 switch (filter_arrangement) {
14128 case CAM_FILTER_ARRANGEMENT_RGGB:
14129 case CAM_FILTER_ARRANGEMENT_GRBG:
14130 case CAM_FILTER_ARRANGEMENT_GBRG:
14131 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014132 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14133 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014134 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014135 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14136 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014137 }
14138 break;
14139 case CAM_FILTER_ARRANGEMENT_Y:
14140 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14141 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14142 }
14143 break;
14144 default:
14145 break;
14146 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014147 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14148 feature_mask, stream_type, filter_arrangement);
14149
14150
Thierry Strudel3d639192016-09-09 11:52:26 -070014151}
14152
14153/*===========================================================================
14154* FUNCTION : getSensorMountAngle
14155*
14156* DESCRIPTION: Retrieve sensor mount angle
14157*
14158* PARAMETERS : None
14159*
14160* RETURN : sensor mount angle in uint32_t
14161*==========================================================================*/
14162uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14163{
14164 return gCamCapability[mCameraId]->sensor_mount_angle;
14165}
14166
14167/*===========================================================================
14168* FUNCTION : getRelatedCalibrationData
14169*
14170* DESCRIPTION: Retrieve related system calibration data
14171*
14172* PARAMETERS : None
14173*
14174* RETURN : Pointer of related system calibration data
14175*==========================================================================*/
14176const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14177{
14178 return (const cam_related_system_calibration_data_t *)
14179 &(gCamCapability[mCameraId]->related_cam_calibration);
14180}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014181
14182/*===========================================================================
14183 * FUNCTION : is60HzZone
14184 *
14185 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14186 *
14187 * PARAMETERS : None
14188 *
14189 * RETURN : True if in 60Hz zone, False otherwise
14190 *==========================================================================*/
14191bool QCamera3HardwareInterface::is60HzZone()
14192{
14193 time_t t = time(NULL);
14194 struct tm lt;
14195
14196 struct tm* r = localtime_r(&t, &lt);
14197
14198 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14199 return true;
14200 else
14201 return false;
14202}
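/*
 * Heuristic note: tm_gmtoff is used as a rough proxy for mains frequency.
 * Offsets at or below UTC-2 (the Americas) and at or above UTC+8 (East Asia,
 * Oceania) are treated as 60Hz regions; everything in between defaults to
 * 50Hz, and 60Hz is also assumed when local time cannot be determined. This
 * is an approximation and does not track per-country exceptions.
 */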
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014203
14204/*===========================================================================
14205 * FUNCTION : adjustBlackLevelForCFA
14206 *
14207 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14208 * of bayer CFA (Color Filter Array).
14209 *
14210 * PARAMETERS : @input: black level pattern in the order of RGGB
14211 * @output: black level pattern in the order of CFA
14212 * @color_arrangement: CFA color arrangement
14213 *
14214 * RETURN : None
14215 *==========================================================================*/
14216template<typename T>
14217void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14218 T input[BLACK_LEVEL_PATTERN_CNT],
14219 T output[BLACK_LEVEL_PATTERN_CNT],
14220 cam_color_filter_arrangement_t color_arrangement)
14221{
14222 switch (color_arrangement) {
14223 case CAM_FILTER_ARRANGEMENT_GRBG:
14224 output[0] = input[1];
14225 output[1] = input[0];
14226 output[2] = input[3];
14227 output[3] = input[2];
14228 break;
14229 case CAM_FILTER_ARRANGEMENT_GBRG:
14230 output[0] = input[2];
14231 output[1] = input[3];
14232 output[2] = input[0];
14233 output[3] = input[1];
14234 break;
14235 case CAM_FILTER_ARRANGEMENT_BGGR:
14236 output[0] = input[3];
14237 output[1] = input[2];
14238 output[2] = input[1];
14239 output[3] = input[0];
14240 break;
14241 case CAM_FILTER_ARRANGEMENT_RGGB:
14242 output[0] = input[0];
14243 output[1] = input[1];
14244 output[2] = input[2];
14245 output[3] = input[3];
14246 break;
14247 default:
14248 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14249 break;
14250 }
14251}
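/*
 * Example (GRBG sensor): with input = {R, Gr, Gb, B} in RGGB order, the
 * remapped output is {Gr, R, B, Gb}, i.e. the output follows the physical CFA
 * cell order of the sensor rather than the fixed RGGB metadata order.
 */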
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014252
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014253void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14254 CameraMetadata &resultMetadata,
14255 std::shared_ptr<metadata_buffer_t> settings)
14256{
14257 if (settings == nullptr) {
14258 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14259 return;
14260 }
14261
14262 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14263 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14264 }
14265
14266 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14267 String8 str((const char *)gps_methods);
14268 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14269 }
14270
14271 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14272 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14273 }
14274
14275 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14276 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14277 }
14278
14279 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14280 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14281 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14282 }
14283
14284 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14285 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14286 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14287 }
14288
14289 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14290 int32_t fwk_thumb_size[2];
14291 fwk_thumb_size[0] = thumb_size->width;
14292 fwk_thumb_size[1] = thumb_size->height;
14293 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14294 }
14295
14296 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14297 uint8_t fwk_intent = intent[0];
14298 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14299 }
14300}
14301
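/*
 * Descriptive note: trySubmittingHdrPlusRequestLocked() below treats a request
 * as an HDR+ candidate only when the noise reduction and edge modes are
 * HIGH_QUALITY and the single output buffer is a BLOB (JPEG) stream; it then
 * borrows a YUV buffer from the pic channel and submits the capture to the
 * HDR+ service, returning false whenever any of these steps fails.
 */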
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014302bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14303 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14304 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014305{
14306 if (hdrPlusRequest == nullptr) return false;
14307
14308 // Check noise reduction mode is high quality.
14309 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14310 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14311 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014312 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14313 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014314 return false;
14315 }
14316
14317 // Check edge mode is high quality.
14318 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14319 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14320 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14321 return false;
14322 }
14323
14324 if (request.num_output_buffers != 1 ||
14325 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14326 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014327 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14328 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14329 request.output_buffers[i].stream->width,
14330 request.output_buffers[i].stream->height,
14331 request.output_buffers[i].stream->format);
14332 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014333 return false;
14334 }
14335
14336 // Get a YUV buffer from pic channel.
14337 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14338 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14339 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14340 if (res != OK) {
14341 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14342 __FUNCTION__, strerror(-res), res);
14343 return false;
14344 }
14345
14346 pbcamera::StreamBuffer buffer;
14347 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014348 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014349 buffer.data = yuvBuffer->buffer;
14350 buffer.dataSize = yuvBuffer->frame_len;
14351
14352 pbcamera::CaptureRequest pbRequest;
14353 pbRequest.id = request.frame_number;
14354 pbRequest.outputBuffers.push_back(buffer);
14355
14356 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014357 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014358 if (res != OK) {
14359 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14360 strerror(-res), res);
14361 return false;
14362 }
14363
14364 hdrPlusRequest->yuvBuffer = yuvBuffer;
14365 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14366
14367 return true;
14368}
14369
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014370status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14371{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014372 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14373 return OK;
14374 }
14375
14376 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14377 if (res != OK) {
14378 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14379 strerror(-res), res);
14380 return res;
14381 }
14382 gHdrPlusClientOpening = true;
14383
14384 return OK;
14385}
14386
Chien-Yu Chenee335912017-02-09 17:53:20 -080014387status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14388{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014389 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014390
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014391 if (mHdrPlusModeEnabled) {
14392 return OK;
14393 }
14394
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014395 // Check if gHdrPlusClient is opened or being opened.
14396 if (gHdrPlusClient == nullptr) {
14397 if (gHdrPlusClientOpening) {
14398 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14399 return OK;
14400 }
14401
14402 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014403 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014404 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14405 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014406 return res;
14407 }
14408
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014409 // When opening HDR+ client completes, HDR+ mode will be enabled.
14410 return OK;
14411
Chien-Yu Chenee335912017-02-09 17:53:20 -080014412 }
14413
14414 // Configure stream for HDR+.
14415 res = configureHdrPlusStreamsLocked();
14416 if (res != OK) {
14417 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014418 return res;
14419 }
14420
14421 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14422 res = gHdrPlusClient->setZslHdrPlusMode(true);
14423 if (res != OK) {
14424 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014425 return res;
14426 }
14427
14428 mHdrPlusModeEnabled = true;
14429 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14430
14431 return OK;
14432}
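/*
 * Descriptive note: enableHdrPlusModeLocked() is a no-op while the HDR+ client
 * is still opening; onOpened() below re-enters it once the client is ready,
 * configures the HDR+ streams and switches Easel into ZSL HDR+ mode.
 */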
14433
14434void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14435{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014436 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014437 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014438 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14439 if (res != OK) {
14440 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14441 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014442
14443 // Close HDR+ client so Easel can enter low power mode.
14444 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14445 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014446 }
14447
14448 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014449 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014450 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14451}
14452
14453status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014454{
14455 pbcamera::InputConfiguration inputConfig;
14456 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14457 status_t res = OK;
14458
14459 // Configure HDR+ client streams.
14460 // Get input config.
14461 if (mHdrPlusRawSrcChannel) {
14462 // HDR+ input buffers will be provided by HAL.
14463 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14464 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14465 if (res != OK) {
14466 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14467 __FUNCTION__, strerror(-res), res);
14468 return res;
14469 }
14470
14471 inputConfig.isSensorInput = false;
14472 } else {
14473 // Sensor MIPI will send data to Easel.
14474 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014475 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014476 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14477 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14478 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14479 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14480 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14481 if (mSensorModeInfo.num_raw_bits != 10) {
14482 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14483 mSensorModeInfo.num_raw_bits);
14484 return BAD_VALUE;
14485 }
14486
14487 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014488 }
14489
14490 // Get output configurations.
14491 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014492 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014493
14494 // Easel may need to output YUV output buffers if mPictureChannel was created.
14495 pbcamera::StreamConfiguration yuvOutputConfig;
14496 if (mPictureChannel != nullptr) {
14497 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14498 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14499 if (res != OK) {
14500 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14501 __FUNCTION__, strerror(-res), res);
14502
14503 return res;
14504 }
14505
14506 outputStreamConfigs.push_back(yuvOutputConfig);
14507 }
14508
14509 // TODO: consider other channels for YUV output buffers.
14510
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014511 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014512 if (res != OK) {
14513 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14514 strerror(-res), res);
14515 return res;
14516 }
14517
14518 return OK;
14519}
14520
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014521void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14522{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014523 if (client == nullptr) {
14524 ALOGE("%s: Opened client is null.", __FUNCTION__);
14525 return;
14526 }
14527
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014528 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014529 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14530
14531 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014532 if (!gHdrPlusClientOpening) {
14533 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14534 return;
14535 }
14536
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014537 gHdrPlusClient = std::move(client);
14538 gHdrPlusClientOpening = false;
14539
14540 // Set static metadata.
14541 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14542 if (res != OK) {
14543 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14544 __FUNCTION__, strerror(-res), res);
14545 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14546 gHdrPlusClient = nullptr;
14547 return;
14548 }
14549
14550 // Enable HDR+ mode.
14551 res = enableHdrPlusModeLocked();
14552 if (res != OK) {
14553 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14554 }
14555}
14556
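// HDR+ client callback: invoked when opening an HDR+ client failed. Logs the error and
// clears the pending-open flag.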
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014557void QCamera3HardwareInterface::onOpenFailed(status_t err)
14558{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014559 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14560 Mutex::Autolock l(gHdrPlusClientLock);
14561 gHdrPlusClientOpening = false;
14562}
14563
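// HDR+ client callback: invoked when the HDR+ client hits an unrecoverable error. Moves the
// HAL into the ERROR state and runs the common camera-device error handling.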
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014564void QCamera3HardwareInterface::onFatalError()
14565{
14566 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14567
14568 // Set HAL state to error.
14569 pthread_mutex_lock(&mMutex);
14570 mState = ERROR;
14571 pthread_mutex_unlock(&mMutex);
14572
14573 handleCameraDeviceError();
14574}
14575
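// HDR+ client callback: invoked when an HDR+ capture result is ready. The result metadata is
// merged with the original still-capture settings, the YUV output buffer is handed back to the
// pic channel for JPEG encoding, the shutter and result metadata are dispatched to the
// framework, and the pending HDR+ request is removed.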
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014576void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014577 const camera_metadata_t &resultMetadata)
14578{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014579 if (result != nullptr) {
14580 if (result->outputBuffers.size() != 1) {
14581 ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14582 result->outputBuffers.size());
14583 return;
14584 }
14585
14586 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14587 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14588 result->outputBuffers[0].streamId);
14589 return;
14590 }
14591
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014592 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014593 HdrPlusPendingRequest pendingRequest;
14594 {
14595 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14596 auto req = mHdrPlusPendingRequests.find(result->requestId);
14597 pendingRequest = req->second;
14598 }
14599
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014600 // Update the result metadata with the settings of the HDR+ still capture request because
14601 // the result metadata belongs to a ZSL buffer.
14602 CameraMetadata metadata;
14603 metadata = &resultMetadata;
14604 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14605 camera_metadata_t* updatedResultMetadata = metadata.release();
14606
14607 QCamera3PicChannel *picChannel =
14608 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14609
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014610 // Check if dumping HDR+ YUV output is enabled.
14611 char prop[PROPERTY_VALUE_MAX];
14612 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14613 bool dumpYuvOutput = atoi(prop);
14614
14615 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014616 // Dump yuv buffer to a ppm file.
14617 pbcamera::StreamConfiguration outputConfig;
14618 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14619 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14620 if (rc == OK) {
14621 char buf[FILENAME_MAX] = {};
14622 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14623 result->requestId, result->outputBuffers[0].streamId,
14624 outputConfig.image.width, outputConfig.image.height);
14625
14626 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14627 } else {
14628 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14629 __FUNCTION__, strerror(-rc), rc);
14630 }
14631 }
14632
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014633 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14634 auto halMetadata = std::make_shared<metadata_buffer_t>();
14635 clear_metadata_buffer(halMetadata.get());
14636
14637 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14638 // encoding.
14639 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14640 halStreamId, /*minFrameDuration*/0);
14641 if (res == OK) {
14642 // Return the buffer to pic channel for encoding.
14643 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14644 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14645 halMetadata);
14646 } else {
14647 // Return the buffer without encoding.
14648 // TODO: This should not happen but we may want to report an error buffer to camera
14649 // service.
14650 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14651 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14652 strerror(-res), res);
14653 }
14654
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014655 // Find the sensor timestamp and mark this frame's shutter as ready.
14656 camera_metadata_ro_entry_t entry;
14657 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14658 ANDROID_SENSOR_TIMESTAMP, &entry);
14659 if (res != OK) {
14660 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14661 __FUNCTION__, result->requestId, strerror(-res), res);
14662 } else {
14663 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14664 }
14665
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014666 // Send HDR+ metadata to framework.
14667 {
14668 pthread_mutex_lock(&mMutex);
14669
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014670 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14671 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014672 pthread_mutex_unlock(&mMutex);
14673 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014674
14675 // Remove the HDR+ pending request.
14676 {
14677 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14678 auto req = mHdrPlusPendingRequests.find(result->requestId);
14679 mHdrPlusPendingRequests.erase(req);
14680 }
14681 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014682}
14683
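// HDR+ client callback: invoked when an HDR+ capture request failed. Drops the pending HDR+
// request, returns its YUV buffer to the pic channel, and reports buffer errors to the
// framework for every buffer still pending in that frame.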
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014684void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14685{
14686 if (failedResult == nullptr) {
14687 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14688 return;
14689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014690
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014691 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014692
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014693 // Remove the pending HDR+ request.
14694 {
14695 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14696 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14697
14698 // Return the buffer to pic channel.
14699 QCamera3PicChannel *picChannel =
14700 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14701 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14702
14703 mHdrPlusPendingRequests.erase(pendingRequest);
14704 }
14705
14706 pthread_mutex_lock(&mMutex);
14707
14708 // Find the pending buffers.
14709 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14710 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14711 if (pendingBuffers->frame_number == failedResult->requestId) {
14712 break;
14713 }
14714 pendingBuffers++;
14715 }
14716
14717 // Send out buffer errors for the pending buffers.
14718 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14719 std::vector<camera3_stream_buffer_t> streamBuffers;
14720 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14721 // Prepare a stream buffer.
14722 camera3_stream_buffer_t streamBuffer = {};
14723 streamBuffer.stream = buffer.stream;
14724 streamBuffer.buffer = buffer.buffer;
14725 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14726 streamBuffer.acquire_fence = -1;
14727 streamBuffer.release_fence = -1;
14728
14729 streamBuffers.push_back(streamBuffer);
14730
14731 // Send out error buffer event.
14732 camera3_notify_msg_t notify_msg = {};
14733 notify_msg.type = CAMERA3_MSG_ERROR;
14734 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14735 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14736 notify_msg.message.error.error_stream = buffer.stream;
14737
14738 orchestrateNotify(&notify_msg);
14739 }
14740
14741 camera3_capture_result_t result = {};
14742 result.frame_number = pendingBuffers->frame_number;
14743 result.num_output_buffers = streamBuffers.size();
14744 result.output_buffers = &streamBuffers[0];
14745
14746 // Send out result with buffer errors.
14747 orchestrateResult(&result);
14748
14749 // Remove pending buffers.
14750 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14751 }
14752
14753 // Remove pending request.
14754 auto halRequest = mPendingRequestsList.begin();
14755 while (halRequest != mPendingRequestsList.end()) {
14756 if (halRequest->frame_number == failedResult->requestId) {
14757 mPendingRequestsList.erase(halRequest);
14758 break;
14759 }
14760 halRequest++;
14761 }
14762
14763 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014764}
14765
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014766
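// ShutterDispatcher collects shutter timestamps per frame number and delivers shutter
// notifications to the framework strictly in frame-number order: a ready shutter is held back
// until all earlier frames' shutters have been sent.
//
// Typical flow (a sketch; the exact call sites live elsewhere in this HAL):
//   expectShutter(frameNumber);            // when a capture request is accepted
//   markShutterReady(frameNumber, ts);     // when the sensor timestamp becomes known
//   clear();                               // on flush or teardown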
14767ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14768 mParent(parent) {}
14769
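// Register a frame number whose shutter notification will be dispatched once its timestamp
// is marked ready.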
14770void ShutterDispatcher::expectShutter(uint32_t frameNumber)
14771{
14772 std::lock_guard<std::mutex> lock(mLock);
14773 mShutters.emplace(frameNumber, Shutter());
14774}
14775
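// Record the timestamp for a frame's shutter, then flush every consecutive ready shutter
// (starting from the oldest pending frame) to the framework in order.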
14776void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14777{
14778 std::lock_guard<std::mutex> lock(mLock);
14779
14780 // Make this frame's shutter ready.
14781 auto shutter = mShutters.find(frameNumber);
14782 if (shutter == mShutters.end()) {
14783 // The shutter was already sent or has been cleared.
14784 return;
14785 }
14786
14787 shutter->second.ready = true;
14788 shutter->second.timestamp = timestamp;
14789
14790 // Iterate through the shutters and send them out until we reach one that is not ready yet.
14791 shutter = mShutters.begin();
14792 while (shutter != mShutters.end()) {
14793 if (!shutter->second.ready) {
14794 // If this shutter is not ready, the following shutters can't be sent.
14795 break;
14796 }
14797
14798 camera3_notify_msg_t msg = {};
14799 msg.type = CAMERA3_MSG_SHUTTER;
14800 msg.message.shutter.frame_number = shutter->first;
14801 msg.message.shutter.timestamp = shutter->second.timestamp;
14802 mParent->orchestrateNotify(&msg);
14803
14804 shutter = mShutters.erase(shutter);
14805 }
14806}
14807
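// Drop the pending shutter entry for a single frame number.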
14808void ShutterDispatcher::clear(uint32_t frameNumber)
14809{
14810 std::lock_guard<std::mutex> lock(mLock);
14811 mShutters.erase(frameNumber);
14812}
14813
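// Drop all pending shutters, logging any that were never delivered.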
14814void ShutterDispatcher::clear()
14815{
14816 std::lock_guard<std::mutex> lock(mLock);
14817
14818 // Log errors for stale shutters.
14819 for (auto &shutter : mShutters) {
14820 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
14821 __FUNCTION__, shutter.first, shutter.second.ready,
14822 shutter.second.timestamp);
14823 }
14824 mShutters.clear();
14825}
14826
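// OutputBufferDispatcher mirrors ShutterDispatcher for output buffers: completed buffers are
// queued per stream and returned to the framework in frame-number order within each stream,
// so a ready buffer waits until all earlier frames' buffers on that stream have been sent.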
14827OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14828 mParent(parent) {}
14829
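// Reset per-stream buffer tracking for a new stream configuration.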
14830status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14831{
14832 std::lock_guard<std::mutex> lock(mLock);
14833 mStreamBuffers.clear();
14834 if (!streamList) {
14835 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
14836 return -EINVAL;
14837 }
14838
14839 // Create a "frame-number -> buffer" map for each stream.
14840 for (uint32_t i = 0; i < streamList->num_streams; i++) {
14841 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
14842 }
14843
14844 return OK;
14845}
14846
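// Register an expected output buffer for a frame number on a previously configured stream.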
14847status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
14848{
14849 std::lock_guard<std::mutex> lock(mLock);
14850
14851 // Find the "frame-number -> buffer" map for the stream.
14852 auto buffers = mStreamBuffers.find(stream);
14853 if (buffers == mStreamBuffers.end()) {
14854 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
14855 return -EINVAL;
14856 }
14857
14858 // Create an unready buffer for this frame number.
14859 buffers->second.emplace(frameNumber, Buffer());
14860 return OK;
14861}
14862
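// Mark a frame's output buffer as ready on its stream, then flush every consecutive ready
// buffer on that stream (starting from the oldest pending frame) to the framework in order.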
14863void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
14864 const camera3_stream_buffer_t &buffer)
14865{
14866 std::lock_guard<std::mutex> lock(mLock);
14867
14868 // Find the "frame-number -> buffer" map for the stream.
14869 auto buffers = mStreamBuffers.find(buffer.stream);
14870 if (buffers == mStreamBuffers.end()) {
14871 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
14872 return;
14873 }
14874
14875 // Find the unready buffer for this frame number and mark it ready.
14876 auto pendingBuffer = buffers->second.find(frameNumber);
14877 if (pendingBuffer == buffers->second.end()) {
14878 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
14879 return;
14880 }
14881
14882 pendingBuffer->second.ready = true;
14883 pendingBuffer->second.buffer = buffer;
14884
14885 // Iterate through the buffers and send them out until we reach one that is not ready yet.
14886 pendingBuffer = buffers->second.begin();
14887 while (pendingBuffer != buffers->second.end()) {
14888 if (!pendingBuffer->second.ready) {
14889 // If this buffer is not ready, the following buffers can't be sent.
14890 break;
14891 }
14892
14893 camera3_capture_result_t result = {};
14894 result.frame_number = pendingBuffer->first;
14895 result.num_output_buffers = 1;
14896 result.output_buffers = &pendingBuffer->second.buffer;
14897
14898 // Send out the result with this stream's ready buffer.
14899 mParent->orchestrateResult(&result);
14900
14901 pendingBuffer = buffers->second.erase(pendingBuffer);
14902 }
14903}
14904
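// Drop all pending buffers, logging any that were never delivered; optionally also forget the
// configured streams.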
14905void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
14906{
14907 std::lock_guard<std::mutex> lock(mLock);
14908
14909 // Log errors for stale buffers.
14910 for (auto &buffers : mStreamBuffers) {
14911 for (auto &buffer : buffers.second) {
14912 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
14913 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
14914 }
14915 buffers.second.clear();
14916 }
14917
14918 if (clearConfiguredStreams) {
14919 mStreamBuffers.clear();
14920 }
14921}
14922
Thierry Strudel3d639192016-09-09 11:52:26 -070014923}; //end namespace qcamera