/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold for detection of missing request buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0


// TODO: Enable HDR+ for front camera after it's supported. b/37723569.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

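// Globals shared across camera sessions: per-camera capabilities, cached static metadata,
// the HAL-wide log level, and the session count/lock defined elsewhere.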
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.

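// Lookup tables mapping framework-visible settings (Android camera_metadata enums,
// QCamera3 vendor tag values, and property strings) to the corresponding mm-camera
// (cam_*) enums used by the backend.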
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

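// Supported JPEG thumbnail sizes, stored as flattened (width, height) pairs;
// the leading (0, 0) entry indicates that thumbnail generation can be disabled.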
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because, when mapping from HAL to Android,
 * the lookup traverses from lower to higher index. For HAL values that map to multiple
 * Android values, the traversal selects the first match found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

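// camera3_device_ops dispatch table handed to the camera framework;
// entries this HAL does not implement are left NULL.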
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

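/*===========================================================================
 * FUNCTION   : logEaselEvent
 *
 * DESCRIPTION: Log an Easel profiling event with a CLOCK_BOOTTIME timestamp.
 *              Logging happens only when gEaselProfilingEnabled is set.
 *
 * PARAMETERS :
 *   @tag   : log tag for the event
 *   @event : description of the event being logged
 *
 * RETURN     : none
 *==========================================================================*/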
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
974 LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
1129 LOGE("initParamters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configurations are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that. Also, we have logic to override to the full array
                 * size, so keeping the logic lenient at the moment.
                 */
1274 }
1275 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1276 MAX_SIZES_CNT);
1277 for (size_t i = 0; i < count; i++) {
1278 if (((int32_t)rotatedWidth ==
1279 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1280 ((int32_t)rotatedHeight ==
1281 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1282 sizeFound = true;
1283 break;
1284 }
1285 }
1286 break;
1287 } /* End of switch(newStream->format) */
1288
1289 /* We error out even if a single stream has unsupported size set */
1290 if (!sizeFound) {
1291 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1292 rotatedWidth, rotatedHeight, newStream->format,
1293 gCamCapability[mCameraId]->active_array_size.width,
1294 gCamCapability[mCameraId]->active_array_size.height);
1295 rc = -EINVAL;
1296 break;
1297 }
1298 } /* End of for each stream */
1299 return rc;
1300}
1301
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001302/*===========================================================================
1303 * FUNCTION : validateUsageFlags
1304 *
1305 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1306 *
1307 * PARAMETERS :
1308 * @stream_list : streams to be configured
1309 *
1310 * RETURN :
1311 * NO_ERROR if the usage flags are supported
1312 * error code if usage flags are not supported
1313 *
1314 *==========================================================================*/
1315int QCamera3HardwareInterface::validateUsageFlags(
1316 const camera3_stream_configuration_t* streamList)
1317{
1318 for (size_t j = 0; j < streamList->num_streams; j++) {
1319 const camera3_stream_t *newStream = streamList->streams[j];
1320
1321 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1322 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1323 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1324 continue;
1325 }
1326
1327 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1328 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1329 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1330 bool forcePreviewUBWC = true;
1331 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1332 forcePreviewUBWC = false;
1333 }
1334 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1335 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1336 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1337 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1338 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1339 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1340
1341 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1342 // So color spaces will always match.
1343
1344 // Check whether underlying formats of shared streams match.
1345 if (isVideo && isPreview && videoFormat != previewFormat) {
1346 LOGE("Combined video and preview usage flag is not supported");
1347 return -EINVAL;
1348 }
1349 if (isPreview && isZSL && previewFormat != zslFormat) {
1350 LOGE("Combined preview and zsl usage flag is not supported");
1351 return -EINVAL;
1352 }
1353 if (isVideo && isZSL && videoFormat != zslFormat) {
1354 LOGE("Combined video and zsl usage flag is not supported");
1355 return -EINVAL;
1356 }
1357 }
1358 return NO_ERROR;
1359}
1360
1361/*===========================================================================
1362 * FUNCTION : validateUsageFlagsForEis
1363 *
1364 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1365 *
1366 * PARAMETERS :
1367 * @stream_list : streams to be configured
1368 *
1369 * RETURN :
1370 * NO_ERROR if the usage flags are supported
1371 * error code if usage flags are not supported
1372 *
1373 *==========================================================================*/
1374int QCamera3HardwareInterface::validateUsageFlagsForEis(
1375 const camera3_stream_configuration_t* streamList)
1376{
1377 for (size_t j = 0; j < streamList->num_streams; j++) {
1378 const camera3_stream_t *newStream = streamList->streams[j];
1379
1380 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1381 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1382
1383        // Because EIS is "hard-coded" for certain use cases, and the current
1384        // implementation doesn't support sharing preview and video on the same
1385        // stream, return failure if EIS is forced on.
1386 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1387 LOGE("Combined video and preview usage flag is not supported due to EIS");
1388 return -EINVAL;
1389 }
1390 }
1391 return NO_ERROR;
1392}
1393
Thierry Strudel3d639192016-09-09 11:52:26 -07001394/*==============================================================================
1395 * FUNCTION : isSupportChannelNeeded
1396 *
1397 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1398 *
1399 * PARAMETERS :
1400 * @stream_list : streams to be configured
1401 * @stream_config_info : the config info for streams to be configured
1402 *
1403 * RETURN : Boolean true/false decision
1404 *
1405 *==========================================================================*/
1406bool QCamera3HardwareInterface::isSupportChannelNeeded(
1407 camera3_stream_configuration_t *streamList,
1408 cam_stream_size_info_t stream_config_info)
1409{
1410 uint32_t i;
1411 bool pprocRequested = false;
1412    /* Check for conditions where the PProc pipeline does not have any streams */
1413 for (i = 0; i < stream_config_info.num_streams; i++) {
1414 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1415 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1416 pprocRequested = true;
1417 break;
1418 }
1419 }
1420
1421    if (pprocRequested == false)
1422 return true;
1423
1424    /* Dummy stream needed if only RAW or JPEG streams are present */
1425 for (i = 0; i < streamList->num_streams; i++) {
1426 switch(streamList->streams[i]->format) {
1427 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1428 case HAL_PIXEL_FORMAT_RAW10:
1429 case HAL_PIXEL_FORMAT_RAW16:
1430 case HAL_PIXEL_FORMAT_BLOB:
1431 break;
1432 default:
1433 return false;
1434 }
1435 }
1436 return true;
1437}
1438
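// The second loop above is an "every output is RAW or BLOB" test: if it holds,
// a dummy/support stream is needed so the PProc pipeline has something to run
// on. A standalone sketch of the same predicate follows; the SketchStreamFormat
// enum and helper name are placeholders, not HAL types.
#if 0   // illustrative sketch, not part of the HAL build
#include <initializer_list>

enum class SketchStreamFormat { Raw, Blob, Yuv, ImplementationDefined };

// True when no processed (non-RAW, non-BLOB) output stream is present,
// i.e. the case in which the dummy stream above is required.
static bool onlyRawOrBlobStreams(std::initializer_list<SketchStreamFormat> formats)
{
    for (SketchStreamFormat f : formats) {
        if (f != SketchStreamFormat::Raw && f != SketchStreamFormat::Blob) {
            return false;
        }
    }
    return true;
}
#endif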
1439/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001440 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001441 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001442 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001443 *
1444 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001445 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001446 *
1447 * RETURN : int32_t type of status
1448 * NO_ERROR -- success
1449 * non-zero failure code
1450 *
1451 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001452int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001453{
1454 int32_t rc = NO_ERROR;
1455
1456 cam_dimension_t max_dim = {0, 0};
1457 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1458 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1459 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1460 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1461 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1462 }
1463
1464 clear_metadata_buffer(mParameters);
1465
1466 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1467 max_dim);
1468 if (rc != NO_ERROR) {
1469 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1470 return rc;
1471 }
1472
1473 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1474 if (rc != NO_ERROR) {
1475 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1476 return rc;
1477 }
1478
1479 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001480 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001481
1482 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1483 mParameters);
1484 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001486 return rc;
1487 }
1488
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001490 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1491 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1492 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1493 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1494 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001495
1496 return rc;
1497}
1498
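// The dimension pass at the top of getSensorModeInfo() takes a per-axis
// maximum over all configured streams: the widest width and the tallest
// height, which may come from two different streams. A minimal sketch of that
// computation, using a local Dim struct instead of cam_dimension_t:
#if 0   // illustrative sketch, not part of the HAL build
#include <cstdint>
#include <vector>

struct Dim { int32_t width; int32_t height; };

static Dim maxStreamDimension(const std::vector<Dim> &streams)
{
    Dim maxDim = {0, 0};
    for (const Dim &d : streams) {
        if (d.width  > maxDim.width)  maxDim.width  = d.width;
        if (d.height > maxDim.height) maxDim.height = d.height;
    }
    return maxDim;   // not necessarily the size of any single stream
}
#endif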
1499/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001500 * FUNCTION : addToPPFeatureMask
1501 *
1502 * DESCRIPTION: add additional features to pp feature mask based on
1503 * stream type and usecase
1504 *
1505 * PARAMETERS :
1506 * @stream_format : stream pixel format used to select feature mask additions
1507 * @stream_idx : stream idx within postprocess_mask list to change
1508 *
1509 * RETURN : None
1510 *
1511 *==========================================================================*/
1512void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1513 uint32_t stream_idx)
1514{
1515 char feature_mask_value[PROPERTY_VALUE_MAX];
1516 cam_feature_mask_t feature_mask;
1517 int args_converted;
1518 int property_len;
1519
1520 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001521#ifdef _LE_CAMERA_
1522 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1523 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1524 property_len = property_get("persist.camera.hal3.feature",
1525 feature_mask_value, swtnr_feature_mask_value);
1526#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 property_len = property_get("persist.camera.hal3.feature",
1528 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001529#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001530 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1531 (feature_mask_value[1] == 'x')) {
1532 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1533 } else {
1534 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1535 }
1536 if (1 != args_converted) {
1537 feature_mask = 0;
1538 LOGE("Wrong feature mask %s", feature_mask_value);
1539 return;
1540 }
1541
1542 switch (stream_format) {
1543 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1544 /* Add LLVD to pp feature mask only if video hint is enabled */
1545 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1546 mStreamConfigInfo.postprocess_mask[stream_idx]
1547 |= CAM_QTI_FEATURE_SW_TNR;
1548 LOGH("Added SW TNR to pp feature mask");
1549 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1550 mStreamConfigInfo.postprocess_mask[stream_idx]
1551 |= CAM_QCOM_FEATURE_LLVD;
1552 LOGH("Added LLVD SeeMore to pp feature mask");
1553 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001554 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1555 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1556 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1557 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001558 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1559 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1560 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1561 CAM_QTI_FEATURE_BINNING_CORRECTION;
1562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001563 break;
1564 }
1565 default:
1566 break;
1567 }
1568 LOGD("PP feature mask %llx",
1569 mStreamConfigInfo.postprocess_mask[stream_idx]);
1570}
1571
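// addToPPFeatureMask() accepts persist.camera.hal3.feature either as a
// "0x"-prefixed hex string or as a plain decimal string. A standalone sketch
// of that parse using strtoull instead of sscanf is below; equivalence with
// the HAL's sscanf path is assumed for well-formed input, not verified.
#if 0   // illustrative sketch, not part of the HAL build
#include <cstdint>
#include <cstdlib>
#include <cstring>

// Returns 0 on empty or malformed input, mirroring the "wrong feature mask"
// error path above.
static uint64_t parseFeatureMask(const char *value)
{
    if (value == nullptr || *value == '\0') return 0;
    int base = (strncmp(value, "0x", 2) == 0) ? 16 : 10;
    char *end = nullptr;
    uint64_t mask = strtoull(value, &end, base);
    return (end != nullptr && *end == '\0') ? mask : 0;
}
#endif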
1572/*==============================================================================
1573 * FUNCTION : updateFpsInPreviewBuffer
1574 *
1575 * DESCRIPTION: update FPS information in preview buffer.
1576 *
1577 * PARAMETERS :
1578 * @metadata : pointer to metadata buffer
1579 * @frame_number: frame_number to look for in pending buffer list
1580 *
1581 * RETURN : None
1582 *
1583 *==========================================================================*/
1584void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1585 uint32_t frame_number)
1586{
1587 // Mark all pending buffers for this particular request
1588 // with corresponding framerate information
1589 for (List<PendingBuffersInRequest>::iterator req =
1590 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1591 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1592 for(List<PendingBufferInfo>::iterator j =
1593 req->mPendingBufferList.begin();
1594 j != req->mPendingBufferList.end(); j++) {
1595 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1596 if ((req->frame_number == frame_number) &&
1597 (channel->getStreamTypeMask() &
1598 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1599 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1600 CAM_INTF_PARM_FPS_RANGE, metadata) {
1601 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1602 struct private_handle_t *priv_handle =
1603 (struct private_handle_t *)(*(j->buffer));
1604 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1605 }
1606 }
1607 }
1608 }
1609}
1610
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001611/*==============================================================================
1612 * FUNCTION : updateTimeStampInPendingBuffers
1613 *
1614 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1615 * of a frame number
1616 *
1617 * PARAMETERS :
1618 * @frameNumber : frame number whose pending buffers will be tagged with the timestamp
1619 * @timestamp : timestamp to be set
1620 *
1621 * RETURN : None
1622 *
1623 *==========================================================================*/
1624void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1625 uint32_t frameNumber, nsecs_t timestamp)
1626{
1627 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1628 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1629 if (req->frame_number != frameNumber)
1630 continue;
1631
1632 for (auto k = req->mPendingBufferList.begin();
1633 k != req->mPendingBufferList.end(); k++ ) {
1634 struct private_handle_t *priv_handle =
1635 (struct private_handle_t *) (*(k->buffer));
1636 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1637 }
1638 }
1639 return;
1640}
1641
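// updateFpsInPreviewBuffer() and updateTimeStampInPendingBuffers() share one
// shape: walk the pending requests, match on frame number, then tag every
// buffer in the matching request. A stripped-down sketch of that pattern with
// a generic tag callback; the container types here are stand-ins, not the
// HAL's PendingBuffersMap.
#if 0   // illustrative sketch, not part of the HAL build
#include <cstdint>
#include <functional>
#include <list>

struct SketchBuffer  { int handle; };
struct SketchRequest { uint32_t frameNumber; std::list<SketchBuffer> buffers; };

static void tagBuffersOfFrame(std::list<SketchRequest> &pending, uint32_t frameNumber,
        const std::function<void(SketchBuffer &)> &tag)
{
    for (SketchRequest &req : pending) {
        if (req.frameNumber != frameNumber) continue;
        for (SketchBuffer &buf : req.buffers) {
            tag(buf);   // e.g. set the refresh rate or the VT timestamp
        }
    }
}
#endif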
Thierry Strudel3d639192016-09-09 11:52:26 -07001642/*===========================================================================
1643 * FUNCTION : configureStreams
1644 *
1645 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1646 * and output streams.
1647 *
1648 * PARAMETERS :
1649 * @stream_list : streams to be configured
1650 *
1651 * RETURN : int type of status (NO_ERROR on success)
1652 *
1653 *==========================================================================*/
1654int QCamera3HardwareInterface::configureStreams(
1655 camera3_stream_configuration_t *streamList)
1656{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001657 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001658 int rc = 0;
1659
1660 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001661 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001662 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001663 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001664
1665 return rc;
1666}
1667
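// configureStreams() brackets the real work with an explicit acquire/release
// pair on mPerfLockMgr. A small scope guard would keep that pairing intact if
// early returns were ever added between the two calls; the class below is a
// hypothetical helper sketch, not something this HAL currently defines.
#if 0   // illustrative sketch, not part of the HAL build
#include <functional>
#include <utility>

class ScopedPerfLock {
public:
    ScopedPerfLock(const std::function<void()> &acquire, std::function<void()> release)
        : mRelease(std::move(release)) { acquire(); }
    ~ScopedPerfLock() { mRelease(); }
    ScopedPerfLock(const ScopedPerfLock &) = delete;
    ScopedPerfLock &operator=(const ScopedPerfLock &) = delete;
private:
    std::function<void()> mRelease;
};
// Usage would mirror the calls above: construct the guard with lambdas that
// acquire and release PERF_LOCK_START_PREVIEW around configureStreamsPerfLocked().
#endif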
1668/*===========================================================================
1669 * FUNCTION : configureStreamsPerfLocked
1670 *
1671 * DESCRIPTION: configureStreams while perfLock is held.
1672 *
1673 * PARAMETERS :
1674 * @stream_list : streams to be configured
1675 *
1676 * RETURN : int32_t type of status
1677 * NO_ERROR -- success
1678 * non-zero failure code
1679 *==========================================================================*/
1680int QCamera3HardwareInterface::configureStreamsPerfLocked(
1681 camera3_stream_configuration_t *streamList)
1682{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001683 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001684 int rc = 0;
1685
1686 // Sanity check stream_list
1687 if (streamList == NULL) {
1688 LOGE("NULL stream configuration");
1689 return BAD_VALUE;
1690 }
1691 if (streamList->streams == NULL) {
1692 LOGE("NULL stream list");
1693 return BAD_VALUE;
1694 }
1695
1696 if (streamList->num_streams < 1) {
1697 LOGE("Bad number of streams requested: %d",
1698 streamList->num_streams);
1699 return BAD_VALUE;
1700 }
1701
1702 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1703 LOGE("Maximum number of streams %d exceeded: %d",
1704 MAX_NUM_STREAMS, streamList->num_streams);
1705 return BAD_VALUE;
1706 }
1707
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001708 rc = validateUsageFlags(streamList);
1709 if (rc != NO_ERROR) {
1710 return rc;
1711 }
1712
Thierry Strudel3d639192016-09-09 11:52:26 -07001713 mOpMode = streamList->operation_mode;
1714 LOGD("mOpMode: %d", mOpMode);
1715
1716    /* First invalidate all the streams in mStreamInfo;
1717 * if they appear again, they will be validated */
1718 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1719 it != mStreamInfo.end(); it++) {
1720 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1721 if (channel) {
1722 channel->stop();
1723 }
1724 (*it)->status = INVALID;
1725 }
1726
1727 if (mRawDumpChannel) {
1728 mRawDumpChannel->stop();
1729 delete mRawDumpChannel;
1730 mRawDumpChannel = NULL;
1731 }
1732
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001733 if (mHdrPlusRawSrcChannel) {
1734 mHdrPlusRawSrcChannel->stop();
1735 delete mHdrPlusRawSrcChannel;
1736 mHdrPlusRawSrcChannel = NULL;
1737 }
1738
Thierry Strudel3d639192016-09-09 11:52:26 -07001739 if (mSupportChannel)
1740 mSupportChannel->stop();
1741
1742 if (mAnalysisChannel) {
1743 mAnalysisChannel->stop();
1744 }
1745 if (mMetadataChannel) {
1746        /* If mStreamInfo is not empty, there is a metadata stream */
1747 mMetadataChannel->stop();
1748 }
1749 if (mChannelHandle) {
1750 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1751 mChannelHandle);
1752 LOGD("stopping channel %d", mChannelHandle);
1753 }
1754
1755 pthread_mutex_lock(&mMutex);
1756
1757 // Check state
1758 switch (mState) {
1759 case INITIALIZED:
1760 case CONFIGURED:
1761 case STARTED:
1762 /* valid state */
1763 break;
1764 default:
1765 LOGE("Invalid state %d", mState);
1766 pthread_mutex_unlock(&mMutex);
1767 return -ENODEV;
1768 }
1769
1770 /* Check whether we have video stream */
1771 m_bIs4KVideo = false;
1772 m_bIsVideo = false;
1773 m_bEisSupportedSize = false;
1774 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001775 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001776 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001777 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001778 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001779 uint32_t videoWidth = 0U;
1780 uint32_t videoHeight = 0U;
1781 size_t rawStreamCnt = 0;
1782 size_t stallStreamCnt = 0;
1783 size_t processedStreamCnt = 0;
1784 // Number of streams on ISP encoder path
1785 size_t numStreamsOnEncoder = 0;
1786 size_t numYuv888OnEncoder = 0;
1787 bool bYuv888OverrideJpeg = false;
1788 cam_dimension_t largeYuv888Size = {0, 0};
1789 cam_dimension_t maxViewfinderSize = {0, 0};
1790 bool bJpegExceeds4K = false;
1791 bool bJpegOnEncoder = false;
1792 bool bUseCommonFeatureMask = false;
1793 cam_feature_mask_t commonFeatureMask = 0;
1794 bool bSmallJpegSize = false;
1795 uint32_t width_ratio;
1796 uint32_t height_ratio;
1797 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1798 camera3_stream_t *inputStream = NULL;
1799 bool isJpeg = false;
1800 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001801 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001802 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001803
1804 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1805
1806    /* EIS configuration */
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 uint8_t eis_prop_set;
1808 uint32_t maxEisWidth = 0;
1809 uint32_t maxEisHeight = 0;
1810
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001811 // Initialize all instant AEC related variables
1812 mInstantAEC = false;
1813 mResetInstantAEC = false;
1814 mInstantAECSettledFrameNumber = 0;
1815 mAecSkipDisplayFrameBound = 0;
1816 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001817 mCurrFeatureState = 0;
1818 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001819
Thierry Strudel3d639192016-09-09 11:52:26 -07001820 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1821
1822 size_t count = IS_TYPE_MAX;
1823 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1824 for (size_t i = 0; i < count; i++) {
1825 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001826 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1827 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001828 break;
1829 }
1830 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001831
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001832 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001833 maxEisWidth = MAX_EIS_WIDTH;
1834 maxEisHeight = MAX_EIS_HEIGHT;
1835 }
1836
1837 /* EIS setprop control */
1838 char eis_prop[PROPERTY_VALUE_MAX];
1839 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001840 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 eis_prop_set = (uint8_t)atoi(eis_prop);
1842
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001843 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001844 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1845
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001846 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1847 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001848
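    // Example (assumed typical values, not taken from a specific device
    // config): with persist.camera.eis.enable at its default of "1",
    // m_bEisEnable is the AND of the property, sensor support for EIS 2.0/3.0,
    // and the session not being a constrained high-speed (HFR) configuration;
    // it is additionally cleared below for front cameras and for sessions
    // without a video stream. Setting the property to "0" disables EIS
    // regardless of sensor support.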
Thierry Strudel3d639192016-09-09 11:52:26 -07001849 /* stream configurations */
1850 for (size_t i = 0; i < streamList->num_streams; i++) {
1851 camera3_stream_t *newStream = streamList->streams[i];
1852 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1853 "height = %d, rotation = %d, usage = 0x%x",
1854 i, newStream->stream_type, newStream->format,
1855 newStream->width, newStream->height, newStream->rotation,
1856 newStream->usage);
1857 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1858 newStream->stream_type == CAMERA3_STREAM_INPUT){
1859 isZsl = true;
1860 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001861 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1862 IS_USAGE_PREVIEW(newStream->usage)) {
1863 isPreview = true;
1864 }
1865
Thierry Strudel3d639192016-09-09 11:52:26 -07001866 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1867 inputStream = newStream;
1868 }
1869
Emilian Peev7650c122017-01-19 08:24:33 -08001870 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1871 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001872 isJpeg = true;
1873 jpegSize.width = newStream->width;
1874 jpegSize.height = newStream->height;
1875 if (newStream->width > VIDEO_4K_WIDTH ||
1876 newStream->height > VIDEO_4K_HEIGHT)
1877 bJpegExceeds4K = true;
1878 }
1879
1880 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1881 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1882 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001883 // In HAL3 we can have multiple different video streams.
1884            // The videoWidth/videoHeight variables are used below as the
1885            // dimensions of the largest of them.
1886 if (videoWidth < newStream->width ||
1887 videoHeight < newStream->height) {
1888 videoWidth = newStream->width;
1889 videoHeight = newStream->height;
1890 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001891 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1892 (VIDEO_4K_HEIGHT <= newStream->height)) {
1893 m_bIs4KVideo = true;
1894 }
1895 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1896 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001897
Thierry Strudel3d639192016-09-09 11:52:26 -07001898 }
1899 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1900 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1901 switch (newStream->format) {
1902 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001903 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1904 depthPresent = true;
1905 break;
1906 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001907 stallStreamCnt++;
1908 if (isOnEncoder(maxViewfinderSize, newStream->width,
1909 newStream->height)) {
1910 numStreamsOnEncoder++;
1911 bJpegOnEncoder = true;
1912 }
1913 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1914 newStream->width);
1915 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1916                    newStream->height);
1917 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1918 "FATAL: max_downscale_factor cannot be zero and so assert");
1919 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1920 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1921 LOGH("Setting small jpeg size flag to true");
1922 bSmallJpegSize = true;
1923 }
1924 break;
1925 case HAL_PIXEL_FORMAT_RAW10:
1926 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1927 case HAL_PIXEL_FORMAT_RAW16:
1928 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001929 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1930 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1931 pdStatCount++;
1932 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1935 processedStreamCnt++;
1936 if (isOnEncoder(maxViewfinderSize, newStream->width,
1937 newStream->height)) {
1938 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1939 !IS_USAGE_ZSL(newStream->usage)) {
1940 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1941 }
1942 numStreamsOnEncoder++;
1943 }
1944 break;
1945 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1946 processedStreamCnt++;
1947 if (isOnEncoder(maxViewfinderSize, newStream->width,
1948 newStream->height)) {
1949                // If the Yuv888 size is not greater than 4K, set the feature mask
1950                // to SUPERSET so that it supports concurrent requests on
1951                // YUV and JPEG.
1952 if (newStream->width <= VIDEO_4K_WIDTH &&
1953 newStream->height <= VIDEO_4K_HEIGHT) {
1954 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1955 }
1956 numStreamsOnEncoder++;
1957 numYuv888OnEncoder++;
1958 largeYuv888Size.width = newStream->width;
1959 largeYuv888Size.height = newStream->height;
1960 }
1961 break;
1962 default:
1963 processedStreamCnt++;
1964 if (isOnEncoder(maxViewfinderSize, newStream->width,
1965 newStream->height)) {
1966 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1967 numStreamsOnEncoder++;
1968 }
1969 break;
1970 }
1971
1972 }
1973 }
1974
1975 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1976 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1977 !m_bIsVideo) {
1978 m_bEisEnable = false;
1979 }
1980
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001981 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1982 pthread_mutex_unlock(&mMutex);
1983 return -EINVAL;
1984 }
1985
Thierry Strudel54dc9782017-02-15 12:12:10 -08001986 uint8_t forceEnableTnr = 0;
1987 char tnr_prop[PROPERTY_VALUE_MAX];
1988 memset(tnr_prop, 0, sizeof(tnr_prop));
1989 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1990 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1991
Thierry Strudel3d639192016-09-09 11:52:26 -07001992 /* Logic to enable/disable TNR based on specific config size/etc.*/
1993 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001994 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1995 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001996 else if (forceEnableTnr)
1997 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001998
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001999 char videoHdrProp[PROPERTY_VALUE_MAX];
2000 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2001 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2002 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2003
2004 if (hdr_mode_prop == 1 && m_bIsVideo &&
2005 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2006 m_bVideoHdrEnabled = true;
2007 else
2008 m_bVideoHdrEnabled = false;
2009
2010
Thierry Strudel3d639192016-09-09 11:52:26 -07002011 /* Check if num_streams is sane */
2012 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2013 rawStreamCnt > MAX_RAW_STREAMS ||
2014 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2015        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2016 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2017 pthread_mutex_unlock(&mMutex);
2018 return -EINVAL;
2019 }
2020 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002021 if (isZsl && m_bIs4KVideo) {
2022 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002023 pthread_mutex_unlock(&mMutex);
2024 return -EINVAL;
2025 }
2026 /* Check if stream sizes are sane */
2027 if (numStreamsOnEncoder > 2) {
2028 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2029 pthread_mutex_unlock(&mMutex);
2030 return -EINVAL;
2031 } else if (1 < numStreamsOnEncoder){
2032 bUseCommonFeatureMask = true;
2033 LOGH("Multiple streams above max viewfinder size, common mask needed");
2034 }
2035
2036 /* Check if BLOB size is greater than 4k in 4k recording case */
2037 if (m_bIs4KVideo && bJpegExceeds4K) {
2038 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2039 pthread_mutex_unlock(&mMutex);
2040 return -EINVAL;
2041 }
2042
Emilian Peev7650c122017-01-19 08:24:33 -08002043 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2044 depthPresent) {
2045 LOGE("HAL doesn't support depth streams in HFR mode!");
2046 pthread_mutex_unlock(&mMutex);
2047 return -EINVAL;
2048 }
2049
Thierry Strudel3d639192016-09-09 11:52:26 -07002050 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2051 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2052 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2053 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2054 // configurations:
2055 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2056 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2057 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2058 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2059 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2060 __func__);
2061 pthread_mutex_unlock(&mMutex);
2062 return -EINVAL;
2063 }
2064
2065 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2066 // the YUV stream's size is greater or equal to the JPEG size, set common
2067 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2068 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2069 jpegSize.width, jpegSize.height) &&
2070 largeYuv888Size.width > jpegSize.width &&
2071 largeYuv888Size.height > jpegSize.height) {
2072 bYuv888OverrideJpeg = true;
2073 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2074 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2075 }
2076
2077 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2078 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2079 commonFeatureMask);
2080 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2081 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2082
2083 rc = validateStreamDimensions(streamList);
2084 if (rc == NO_ERROR) {
2085 rc = validateStreamRotations(streamList);
2086 }
2087 if (rc != NO_ERROR) {
2088 LOGE("Invalid stream configuration requested!");
2089 pthread_mutex_unlock(&mMutex);
2090 return rc;
2091 }
2092
Emilian Peev0f3c3162017-03-15 12:57:46 +00002093 if (1 < pdStatCount) {
2094 LOGE("HAL doesn't support multiple PD streams");
2095 pthread_mutex_unlock(&mMutex);
2096 return -EINVAL;
2097 }
2098
2099 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2100 (1 == pdStatCount)) {
2101 LOGE("HAL doesn't support PD streams in HFR mode!");
2102 pthread_mutex_unlock(&mMutex);
2103 return -EINVAL;
2104 }
2105
Thierry Strudel3d639192016-09-09 11:52:26 -07002106 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2107 for (size_t i = 0; i < streamList->num_streams; i++) {
2108 camera3_stream_t *newStream = streamList->streams[i];
2109 LOGH("newStream type = %d, stream format = %d "
2110 "stream size : %d x %d, stream rotation = %d",
2111 newStream->stream_type, newStream->format,
2112 newStream->width, newStream->height, newStream->rotation);
2113 //if the stream is in the mStreamList validate it
2114 bool stream_exists = false;
2115 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2116 it != mStreamInfo.end(); it++) {
2117 if ((*it)->stream == newStream) {
2118 QCamera3ProcessingChannel *channel =
2119 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2120 stream_exists = true;
2121 if (channel)
2122 delete channel;
2123 (*it)->status = VALID;
2124 (*it)->stream->priv = NULL;
2125 (*it)->channel = NULL;
2126 }
2127 }
2128 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2129 //new stream
2130 stream_info_t* stream_info;
2131 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2132 if (!stream_info) {
2133 LOGE("Could not allocate stream info");
2134 rc = -ENOMEM;
2135 pthread_mutex_unlock(&mMutex);
2136 return rc;
2137 }
2138 stream_info->stream = newStream;
2139 stream_info->status = VALID;
2140 stream_info->channel = NULL;
2141 mStreamInfo.push_back(stream_info);
2142 }
2143 /* Covers Opaque ZSL and API1 F/W ZSL */
2144 if (IS_USAGE_ZSL(newStream->usage)
2145 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2146 if (zslStream != NULL) {
2147 LOGE("Multiple input/reprocess streams requested!");
2148 pthread_mutex_unlock(&mMutex);
2149 return BAD_VALUE;
2150 }
2151 zslStream = newStream;
2152 }
2153 /* Covers YUV reprocess */
2154 if (inputStream != NULL) {
2155 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2156 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2157 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2158 && inputStream->width == newStream->width
2159 && inputStream->height == newStream->height) {
2160 if (zslStream != NULL) {
2161                /* This scenario indicates that multiple YUV streams with the same
2162                 * size as the input stream have been requested. Since the zsl stream
2163                 * handle is used solely to override the size of streams that share
2164                 * h/w streams, we just make a guess here as to which of the streams
2165                 * is the ZSL stream. This will be refactored once there is generic
2166                 * logic for streams sharing encoder output.
2167 */
2168                LOGH("Warning: multiple input/reprocess streams requested!");
2169 }
2170 zslStream = newStream;
2171 }
2172 }
2173 }
2174
2175 /* If a zsl stream is set, we know that we have configured at least one input or
2176 bidirectional stream */
2177 if (NULL != zslStream) {
2178 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2179 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2180 mInputStreamInfo.format = zslStream->format;
2181 mInputStreamInfo.usage = zslStream->usage;
2182 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2183 mInputStreamInfo.dim.width,
2184 mInputStreamInfo.dim.height,
2185 mInputStreamInfo.format, mInputStreamInfo.usage);
2186 }
2187
2188 cleanAndSortStreamInfo();
2189 if (mMetadataChannel) {
2190 delete mMetadataChannel;
2191 mMetadataChannel = NULL;
2192 }
2193 if (mSupportChannel) {
2194 delete mSupportChannel;
2195 mSupportChannel = NULL;
2196 }
2197
2198 if (mAnalysisChannel) {
2199 delete mAnalysisChannel;
2200 mAnalysisChannel = NULL;
2201 }
2202
2203 if (mDummyBatchChannel) {
2204 delete mDummyBatchChannel;
2205 mDummyBatchChannel = NULL;
2206 }
2207
Emilian Peev7650c122017-01-19 08:24:33 -08002208 if (mDepthChannel) {
2209 mDepthChannel = NULL;
2210 }
2211
Thierry Strudel2896d122017-02-23 19:18:03 -08002212 char is_type_value[PROPERTY_VALUE_MAX];
2213 property_get("persist.camera.is_type", is_type_value, "4");
2214 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2215
Thierry Strudel3d639192016-09-09 11:52:26 -07002216 //Create metadata channel and initialize it
2217 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2218 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2219 gCamCapability[mCameraId]->color_arrangement);
2220 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2221 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002222 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002223 if (mMetadataChannel == NULL) {
2224 LOGE("failed to allocate metadata channel");
2225 rc = -ENOMEM;
2226 pthread_mutex_unlock(&mMutex);
2227 return rc;
2228 }
2229 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2230 if (rc < 0) {
2231 LOGE("metadata channel initialization failed");
2232 delete mMetadataChannel;
2233 mMetadataChannel = NULL;
2234 pthread_mutex_unlock(&mMutex);
2235 return rc;
2236 }
2237
Thierry Strudel2896d122017-02-23 19:18:03 -08002238 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002239 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002240 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002241    // Keep track of preview/video stream indices.
2242    // There could be more than one preview stream, but only one video stream.
2243 int32_t video_stream_idx = -1;
2244 int32_t preview_stream_idx[streamList->num_streams];
2245 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002246 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2247 /* Allocate channel objects for the requested streams */
2248 for (size_t i = 0; i < streamList->num_streams; i++) {
2249 camera3_stream_t *newStream = streamList->streams[i];
2250 uint32_t stream_usage = newStream->usage;
2251 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2252 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2253 struct camera_info *p_info = NULL;
2254 pthread_mutex_lock(&gCamLock);
2255 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2256 pthread_mutex_unlock(&gCamLock);
2257 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2258 || IS_USAGE_ZSL(newStream->usage)) &&
2259 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002260 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002261 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002262 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2263 if (bUseCommonFeatureMask)
2264 zsl_ppmask = commonFeatureMask;
2265 else
2266 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002267 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 if (numStreamsOnEncoder > 0)
2269 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2270 else
2271 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002272 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002273 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002274 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002275 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 LOGH("Input stream configured, reprocess config");
2277 } else {
2278 //for non zsl streams find out the format
2279 switch (newStream->format) {
2280 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2281 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002283 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2284 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2285 /* add additional features to pp feature mask */
2286 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2287 mStreamConfigInfo.num_streams);
2288
2289 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2290 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2291 CAM_STREAM_TYPE_VIDEO;
2292 if (m_bTnrEnabled && m_bTnrVideo) {
2293 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2294 CAM_QCOM_FEATURE_CPP_TNR;
2295 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2296 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2297 ~CAM_QCOM_FEATURE_CDS;
2298 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002299 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2300 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2301 CAM_QTI_FEATURE_PPEISCORE;
2302 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002303 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002304 } else {
2305 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2306 CAM_STREAM_TYPE_PREVIEW;
2307 if (m_bTnrEnabled && m_bTnrPreview) {
2308 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2309 CAM_QCOM_FEATURE_CPP_TNR;
2310 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2311 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2312 ~CAM_QCOM_FEATURE_CDS;
2313 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002314 if(!m_bSwTnrPreview) {
2315 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2316 ~CAM_QTI_FEATURE_SW_TNR;
2317 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002318 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002319 padding_info.width_padding = mSurfaceStridePadding;
2320 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002321 previewSize.width = (int32_t)newStream->width;
2322 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002323 }
2324 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2325 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2326 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2327 newStream->height;
2328 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2329 newStream->width;
2330 }
2331 }
2332 break;
2333 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002334 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002335 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2336 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2337 if (bUseCommonFeatureMask)
2338 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2339 commonFeatureMask;
2340 else
2341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2342 CAM_QCOM_FEATURE_NONE;
2343 } else {
2344 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2345 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2346 }
2347 break;
2348 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002349 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002350 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2351 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2352 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2353 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2354 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002355 /* Remove rotation if it is not supported
2356 for 4K LiveVideo snapshot case (online processing) */
2357 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2358 CAM_QCOM_FEATURE_ROTATION)) {
2359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2360 &= ~CAM_QCOM_FEATURE_ROTATION;
2361 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002362 } else {
2363 if (bUseCommonFeatureMask &&
2364 isOnEncoder(maxViewfinderSize, newStream->width,
2365 newStream->height)) {
2366 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2367 } else {
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2369 }
2370 }
2371 if (isZsl) {
2372 if (zslStream) {
2373 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2374 (int32_t)zslStream->width;
2375 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2376 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2378 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 } else {
2380 LOGE("Error, No ZSL stream identified");
2381 pthread_mutex_unlock(&mMutex);
2382 return -EINVAL;
2383 }
2384 } else if (m_bIs4KVideo) {
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2386 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2387 } else if (bYuv888OverrideJpeg) {
2388 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2389 (int32_t)largeYuv888Size.width;
2390 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2391 (int32_t)largeYuv888Size.height;
2392 }
2393 break;
2394 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2395 case HAL_PIXEL_FORMAT_RAW16:
2396 case HAL_PIXEL_FORMAT_RAW10:
2397 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2399 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002400 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2401 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2402 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2403 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2404 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2405 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2406 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2407 gCamCapability[mCameraId]->dt[mPDIndex];
2408 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2409 gCamCapability[mCameraId]->vc[mPDIndex];
2410 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 break;
2412 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002413 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002414 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2415 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2416 break;
2417 }
2418 }
2419
2420 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2421 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2422 gCamCapability[mCameraId]->color_arrangement);
2423
2424 if (newStream->priv == NULL) {
2425 //New stream, construct channel
2426 switch (newStream->stream_type) {
2427 case CAMERA3_STREAM_INPUT:
2428 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2429 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2430 break;
2431 case CAMERA3_STREAM_BIDIRECTIONAL:
2432 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2433 GRALLOC_USAGE_HW_CAMERA_WRITE;
2434 break;
2435 case CAMERA3_STREAM_OUTPUT:
2436                /* For video encoding streams, set the read/write-rarely
2437                 * flags so that the buffers may be allocated un-cached */
2438 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2439 newStream->usage |=
2440 (GRALLOC_USAGE_SW_READ_RARELY |
2441 GRALLOC_USAGE_SW_WRITE_RARELY |
2442 GRALLOC_USAGE_HW_CAMERA_WRITE);
2443 else if (IS_USAGE_ZSL(newStream->usage))
2444 {
2445 LOGD("ZSL usage flag skipping");
2446 }
2447 else if (newStream == zslStream
2448 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2449 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2450 } else
2451 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2452 break;
2453 default:
2454 LOGE("Invalid stream_type %d", newStream->stream_type);
2455 break;
2456 }
2457
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002458 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002459 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2460 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2461 QCamera3ProcessingChannel *channel = NULL;
2462 switch (newStream->format) {
2463 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2464 if ((newStream->usage &
2465 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2466 (streamList->operation_mode ==
2467 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2468 ) {
2469 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2470 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002471 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002472 this,
2473 newStream,
2474 (cam_stream_type_t)
2475 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2477 mMetadataChannel,
2478 0); //heap buffers are not required for HFR video channel
2479 if (channel == NULL) {
2480 LOGE("allocation of channel failed");
2481 pthread_mutex_unlock(&mMutex);
2482 return -ENOMEM;
2483 }
2484 //channel->getNumBuffers() will return 0 here so use
2485                        //MAX_INFLIGHT_HFR_REQUESTS
2486 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2487 newStream->priv = channel;
2488 LOGI("num video buffers in HFR mode: %d",
2489 MAX_INFLIGHT_HFR_REQUESTS);
2490 } else {
2491                    /* Copy stream contents in the HFR preview-only case to create a
2492                     * dummy batch channel so that sensor streaming stays in
2493                     * HFR mode */
2494 if (!m_bIsVideo && (streamList->operation_mode ==
2495 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2496 mDummyBatchStream = *newStream;
2497 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002498 int bufferCount = MAX_INFLIGHT_REQUESTS;
2499 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2500 CAM_STREAM_TYPE_VIDEO) {
2501 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2502 bufferCount = MAX_VIDEO_BUFFERS;
2503 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002504 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2505 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002506 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002507 this,
2508 newStream,
2509 (cam_stream_type_t)
2510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2511 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2512 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002513 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002514 if (channel == NULL) {
2515 LOGE("allocation of channel failed");
2516 pthread_mutex_unlock(&mMutex);
2517 return -ENOMEM;
2518 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002519 /* disable UBWC for preview, though supported,
2520 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002521 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002522 (previewSize.width == (int32_t)videoWidth)&&
2523 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002524 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002525 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002526 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002527 newStream->max_buffers = channel->getNumBuffers();
2528 newStream->priv = channel;
2529 }
2530 break;
2531 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2532 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2533 mChannelHandle,
2534 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002535 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002536 this,
2537 newStream,
2538 (cam_stream_type_t)
2539 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2540 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2541 mMetadataChannel);
2542 if (channel == NULL) {
2543 LOGE("allocation of YUV channel failed");
2544 pthread_mutex_unlock(&mMutex);
2545 return -ENOMEM;
2546 }
2547 newStream->max_buffers = channel->getNumBuffers();
2548 newStream->priv = channel;
2549 break;
2550 }
2551 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2552 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002553 case HAL_PIXEL_FORMAT_RAW10: {
2554 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2555 (HAL_DATASPACE_DEPTH != newStream->data_space))
2556 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002557 mRawChannel = new QCamera3RawChannel(
2558 mCameraHandle->camera_handle, mChannelHandle,
2559 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002560 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002561 this, newStream,
2562 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002563 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 if (mRawChannel == NULL) {
2565 LOGE("allocation of raw channel failed");
2566 pthread_mutex_unlock(&mMutex);
2567 return -ENOMEM;
2568 }
2569 newStream->max_buffers = mRawChannel->getNumBuffers();
2570 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2571 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002572 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002573 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002574 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2575 mDepthChannel = new QCamera3DepthChannel(
2576 mCameraHandle->camera_handle, mChannelHandle,
2577 mCameraHandle->ops, NULL, NULL, &padding_info,
2578 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2579 mMetadataChannel);
2580 if (NULL == mDepthChannel) {
2581 LOGE("Allocation of depth channel failed");
2582 pthread_mutex_unlock(&mMutex);
2583 return NO_MEMORY;
2584 }
2585 newStream->priv = mDepthChannel;
2586 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2587 } else {
2588 // Max live snapshot inflight buffer is 1. This is to mitigate
2589 // frame drop issues for video snapshot. The more buffers being
2590 // allocated, the more frame drops there are.
2591 mPictureChannel = new QCamera3PicChannel(
2592 mCameraHandle->camera_handle, mChannelHandle,
2593 mCameraHandle->ops, captureResultCb,
2594 setBufferErrorStatus, &padding_info, this, newStream,
2595 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2596 m_bIs4KVideo, isZsl, mMetadataChannel,
2597 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2598 if (mPictureChannel == NULL) {
2599 LOGE("allocation of channel failed");
2600 pthread_mutex_unlock(&mMutex);
2601 return -ENOMEM;
2602 }
2603 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2604 newStream->max_buffers = mPictureChannel->getNumBuffers();
2605 mPictureChannel->overrideYuvSize(
2606 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2607 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002608 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002609 break;
2610
2611 default:
2612 LOGE("not a supported format 0x%x", newStream->format);
2613 break;
2614 }
2615 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2616 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2617 } else {
2618 LOGE("Error, Unknown stream type");
2619 pthread_mutex_unlock(&mMutex);
2620 return -EINVAL;
2621 }
2622
2623 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002624 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2625 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002626 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002627 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002628 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2629 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2630 }
2631 }
2632
2633 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2634 it != mStreamInfo.end(); it++) {
2635 if ((*it)->stream == newStream) {
2636 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2637 break;
2638 }
2639 }
2640 } else {
2641 // Channel already exists for this stream
2642 // Do nothing for now
2643 }
2644 padding_info = gCamCapability[mCameraId]->padding_info;
2645
Emilian Peev7650c122017-01-19 08:24:33 -08002646        /* Do not add entries for input & depth streams in the meta stream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002647         * since there is no real stream associated with them
2648 */
Emilian Peev7650c122017-01-19 08:24:33 -08002649 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002650 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2651 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002652 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002653 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 }
2655
Binhao Lincdb362a2017-04-20 13:31:54 -07002656 // By default, preview stream TNR is disabled.
2657    // Enable TNR for the preview stream if all of the conditions below are satisfied:
2658 // 1. resolution <= 1080p.
2659 // 2. preview resolution == video resolution.
2660 // 3. video stream TNR is enabled.
2661 // 4. EIS2.0
2662 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2663 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2664 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2665 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2666 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2667 video_stream->width == preview_stream->width &&
2668 video_stream->height == preview_stream->height) {
2669 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2670 CAM_QCOM_FEATURE_CPP_TNR;
2671 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2672 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2673 ~CAM_QCOM_FEATURE_CDS;
2674 }
2675 }
2676
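    // onlyRaw stays true only for the vendor RAW-only configuration mode; it gates
    // creation of the analysis and support channels below.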
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2678 onlyRaw = false;
2679 }
2680
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002681 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002682 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002683 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002684 cam_analysis_info_t analysisInfo;
2685 int32_t ret = NO_ERROR;
2686 ret = mCommon.getAnalysisInfo(
2687 FALSE,
2688 analysisFeatureMask,
2689 &analysisInfo);
2690 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002691 cam_color_filter_arrangement_t analysis_color_arrangement =
2692 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2693 CAM_FILTER_ARRANGEMENT_Y :
2694 gCamCapability[mCameraId]->color_arrangement);
2695 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2696 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002697 cam_dimension_t analysisDim;
2698 analysisDim = mCommon.getMatchingDimension(previewSize,
2699 analysisInfo.analysis_recommended_res);
2700
2701 mAnalysisChannel = new QCamera3SupportChannel(
2702 mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops,
2705 &analysisInfo.analysis_padding_info,
2706 analysisFeatureMask,
2707 CAM_STREAM_TYPE_ANALYSIS,
2708 &analysisDim,
2709 (analysisInfo.analysis_format
2710 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2711 : CAM_FORMAT_YUV_420_NV21),
2712 analysisInfo.hw_analysis_supported,
2713 gCamCapability[mCameraId]->color_arrangement,
2714 this,
2715 0); // force buffer count to 0
2716 } else {
2717 LOGW("getAnalysisInfo failed, ret = %d", ret);
2718 }
2719 if (!mAnalysisChannel) {
2720 LOGW("Analysis channel cannot be created");
2721 }
2722 }
2723
Thierry Strudel3d639192016-09-09 11:52:26 -07002724 //RAW DUMP channel
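    // Created only when raw dumping is enabled and the framework did not already
    // request a RAW stream of its own.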
2725 if (mEnableRawDump && isRawStreamRequested == false){
2726 cam_dimension_t rawDumpSize;
2727 rawDumpSize = getMaxRawSize(mCameraId);
2728 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2729 setPAAFSupport(rawDumpFeatureMask,
2730 CAM_STREAM_TYPE_RAW,
2731 gCamCapability[mCameraId]->color_arrangement);
2732 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2733 mChannelHandle,
2734 mCameraHandle->ops,
2735 rawDumpSize,
2736 &padding_info,
2737 this, rawDumpFeatureMask);
2738 if (!mRawDumpChannel) {
2739 LOGE("Raw Dump channel cannot be created");
2740 pthread_mutex_unlock(&mMutex);
2741 return -ENOMEM;
2742 }
2743 }
2744
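    // Append an entry for the analysis stream to the stream configuration sent to the backend.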
Thierry Strudel3d639192016-09-09 11:52:26 -07002745 if (mAnalysisChannel) {
2746 cam_analysis_info_t analysisInfo;
2747 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2748 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2749 CAM_STREAM_TYPE_ANALYSIS;
2750 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2751 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002752 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002753 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2754 &analysisInfo);
2755 if (rc != NO_ERROR) {
2756 LOGE("getAnalysisInfo failed, ret = %d", rc);
2757 pthread_mutex_unlock(&mMutex);
2758 return rc;
2759 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002760 cam_color_filter_arrangement_t analysis_color_arrangement =
2761 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2762 CAM_FILTER_ARRANGEMENT_Y :
2763 gCamCapability[mCameraId]->color_arrangement);
2764 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2765 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2766 analysis_color_arrangement);
2767
Thierry Strudel3d639192016-09-09 11:52:26 -07002768 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002769 mCommon.getMatchingDimension(previewSize,
2770 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002771 mStreamConfigInfo.num_streams++;
2772 }
2773
Thierry Strudel2896d122017-02-23 19:18:03 -08002774 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002775 cam_analysis_info_t supportInfo;
2776 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2777 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2778 setPAAFSupport(callbackFeatureMask,
2779 CAM_STREAM_TYPE_CALLBACK,
2780 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002781 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002782 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002783 if (ret != NO_ERROR) {
2784 /* Ignore the error for Mono camera
2785 * because the PAAF bit mask is only set
2786 * for CAM_STREAM_TYPE_ANALYSIS stream type
2787 */
2788 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2789 LOGW("getAnalysisInfo failed, ret = %d", ret);
2790 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 }
2792 mSupportChannel = new QCamera3SupportChannel(
2793 mCameraHandle->camera_handle,
2794 mChannelHandle,
2795 mCameraHandle->ops,
2796 &gCamCapability[mCameraId]->padding_info,
2797 callbackFeatureMask,
2798 CAM_STREAM_TYPE_CALLBACK,
2799 &QCamera3SupportChannel::kDim,
2800 CAM_FORMAT_YUV_420_NV21,
2801 supportInfo.hw_analysis_supported,
2802 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002803 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if (!mSupportChannel) {
2805 LOGE("dummy channel cannot be created");
2806 pthread_mutex_unlock(&mMutex);
2807 return -ENOMEM;
2808 }
2809 }
2810
2811 if (mSupportChannel) {
2812 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2813 QCamera3SupportChannel::kDim;
2814 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2815 CAM_STREAM_TYPE_CALLBACK;
2816 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2817 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2818 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2819 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2820 gCamCapability[mCameraId]->color_arrangement);
2821 mStreamConfigInfo.num_streams++;
2822 }
2823
2824 if (mRawDumpChannel) {
2825 cam_dimension_t rawSize;
2826 rawSize = getMaxRawSize(mCameraId);
2827 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2828 rawSize;
2829 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2830 CAM_STREAM_TYPE_RAW;
2831 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2832 CAM_QCOM_FEATURE_NONE;
2833 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2834 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2835 gCamCapability[mCameraId]->color_arrangement);
2836 mStreamConfigInfo.num_streams++;
2837 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002838
2839 if (mHdrPlusRawSrcChannel) {
2840 cam_dimension_t rawSize;
2841 rawSize = getMaxRawSize(mCameraId);
2842 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2843 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2844 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2845 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2846 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2847 gCamCapability[mCameraId]->color_arrangement);
2848 mStreamConfigInfo.num_streams++;
2849 }
2850
Thierry Strudel3d639192016-09-09 11:52:26 -07002851    /* In HFR mode, if no video stream is added, create a dummy channel so that
2852     * the ISP can run in batch mode even for the preview-only case. This channel is
2853     * never 'start'ed (no stream-on), it is only 'initialized' */
2854 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2855 !m_bIsVideo) {
2856 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2857 setPAAFSupport(dummyFeatureMask,
2858 CAM_STREAM_TYPE_VIDEO,
2859 gCamCapability[mCameraId]->color_arrangement);
2860 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002863 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002864 this,
2865 &mDummyBatchStream,
2866 CAM_STREAM_TYPE_VIDEO,
2867 dummyFeatureMask,
2868 mMetadataChannel);
2869 if (NULL == mDummyBatchChannel) {
2870            LOGE("creation of mDummyBatchChannel failed. "
2871 "Preview will use non-hfr sensor mode ");
2872 }
2873 }
2874 if (mDummyBatchChannel) {
2875 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2876 mDummyBatchStream.width;
2877 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2878 mDummyBatchStream.height;
2879 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2880 CAM_STREAM_TYPE_VIDEO;
2881 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2882 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2883 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2884 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2885 gCamCapability[mCameraId]->color_arrangement);
2886 mStreamConfigInfo.num_streams++;
2887 }
2888
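    // max_buffers advertised to the backend: 0 for 4K video, MAX_VIDEO_BUFFERS when
    // the EIS 3.0 property is enabled, otherwise MAX_INFLIGHT_REQUESTS.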
2889 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2890 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002891 m_bIs4KVideo ? 0 :
2892 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002893
2894 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2895 for (pendingRequestIterator i = mPendingRequestsList.begin();
2896 i != mPendingRequestsList.end();) {
2897 i = erasePendingRequest(i);
2898 }
2899 mPendingFrameDropList.clear();
2900 // Initialize/Reset the pending buffers list
2901 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2902 req.mPendingBufferList.clear();
2903 }
2904 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2905
Thierry Strudel3d639192016-09-09 11:52:26 -07002906 mCurJpegMeta.clear();
2907 //Get min frame duration for this streams configuration
2908 deriveMinFrameDuration();
2909
Chien-Yu Chenee335912017-02-09 17:53:20 -08002910 mFirstPreviewIntentSeen = false;
2911
2912    // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002913 {
2914 Mutex::Autolock l(gHdrPlusClientLock);
2915 disableHdrPlusModeLocked();
2916 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002917
Thierry Strudel3d639192016-09-09 11:52:26 -07002918 // Update state
2919 mState = CONFIGURED;
2920
2921 pthread_mutex_unlock(&mMutex);
2922
2923 return rc;
2924}
2925
2926/*===========================================================================
2927 * FUNCTION : validateCaptureRequest
2928 *
2929 * DESCRIPTION: validate a capture request from camera service
2930 *
2931 * PARAMETERS :
2932 * @request : request from framework to process
2933 *
2934 * RETURN :
2935 *
2936 *==========================================================================*/
2937int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002938 camera3_capture_request_t *request,
2939 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002940{
2941 ssize_t idx = 0;
2942 const camera3_stream_buffer_t *b;
2943 CameraMetadata meta;
2944
2945 /* Sanity check the request */
2946 if (request == NULL) {
2947 LOGE("NULL capture request");
2948 return BAD_VALUE;
2949 }
2950
2951 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2952 /*settings cannot be null for the first request*/
2953 return BAD_VALUE;
2954 }
2955
2956 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002957 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2958 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002959        LOGE("Request %d: No output buffers provided!",
2960                frameNumber);
2961 return BAD_VALUE;
2962 }
2963 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2964        LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2965 request->num_output_buffers, MAX_NUM_STREAMS);
2966 return BAD_VALUE;
2967 }
2968 if (request->input_buffer != NULL) {
2969 b = request->input_buffer;
2970 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2971 LOGE("Request %d: Buffer %ld: Status not OK!",
2972 frameNumber, (long)idx);
2973 return BAD_VALUE;
2974 }
2975 if (b->release_fence != -1) {
2976 LOGE("Request %d: Buffer %ld: Has a release fence!",
2977 frameNumber, (long)idx);
2978 return BAD_VALUE;
2979 }
2980 if (b->buffer == NULL) {
2981 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2982 frameNumber, (long)idx);
2983 return BAD_VALUE;
2984 }
2985 }
2986
2987 // Validate all buffers
2988 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002989 if (b == NULL) {
2990 return BAD_VALUE;
2991 }
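    // Validate each output buffer: it must map to a configured stream (channel),
    // have OK status, no release fence, and non-NULL buffer handles.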
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002992 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002993 QCamera3ProcessingChannel *channel =
2994 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2995 if (channel == NULL) {
2996 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2997 frameNumber, (long)idx);
2998 return BAD_VALUE;
2999 }
3000 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3001 LOGE("Request %d: Buffer %ld: Status not OK!",
3002 frameNumber, (long)idx);
3003 return BAD_VALUE;
3004 }
3005 if (b->release_fence != -1) {
3006 LOGE("Request %d: Buffer %ld: Has a release fence!",
3007 frameNumber, (long)idx);
3008 return BAD_VALUE;
3009 }
3010 if (b->buffer == NULL) {
3011 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3012 frameNumber, (long)idx);
3013 return BAD_VALUE;
3014 }
3015 if (*(b->buffer) == NULL) {
3016 LOGE("Request %d: Buffer %ld: NULL private handle!",
3017 frameNumber, (long)idx);
3018 return BAD_VALUE;
3019 }
3020 idx++;
3021 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003022 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003023 return NO_ERROR;
3024}
3025
3026/*===========================================================================
3027 * FUNCTION : deriveMinFrameDuration
3028 *
3029 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3030 * on currently configured streams.
3031 *
3032 * PARAMETERS : NONE
3033 *
3034 * RETURN : NONE
3035 *
3036 *==========================================================================*/
3037void QCamera3HardwareInterface::deriveMinFrameDuration()
3038{
3039 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3040
3041 maxJpegDim = 0;
3042 maxProcessedDim = 0;
3043 maxRawDim = 0;
3044
3045 // Figure out maximum jpeg, processed, and raw dimensions
3046 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3047 it != mStreamInfo.end(); it++) {
3048
3049 // Input stream doesn't have valid stream_type
3050 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3051 continue;
3052
3053 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3054 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3055 if (dimension > maxJpegDim)
3056 maxJpegDim = dimension;
3057 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3058 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3059 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3060 if (dimension > maxRawDim)
3061 maxRawDim = dimension;
3062 } else {
3063 if (dimension > maxProcessedDim)
3064 maxProcessedDim = dimension;
3065 }
3066 }
3067
3068 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3069 MAX_SIZES_CNT);
3070
3071 //Assume all jpeg dimensions are in processed dimensions.
3072 if (maxJpegDim > maxProcessedDim)
3073 maxProcessedDim = maxJpegDim;
3074 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3075 if (maxProcessedDim > maxRawDim) {
3076 maxRawDim = INT32_MAX;
3077
3078 for (size_t i = 0; i < count; i++) {
3079 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3080 gCamCapability[mCameraId]->raw_dim[i].height;
3081 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3082 maxRawDim = dimension;
3083 }
3084 }
3085
3086 //Find minimum durations for processed, jpeg, and raw
3087 for (size_t i = 0; i < count; i++) {
3088 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3089 gCamCapability[mCameraId]->raw_dim[i].height) {
3090 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3091 break;
3092 }
3093 }
3094 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3095 for (size_t i = 0; i < count; i++) {
3096 if (maxProcessedDim ==
3097 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3098 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3099 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3100 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3101 break;
3102 }
3103 }
3104}
3105
3106/*===========================================================================
3107 * FUNCTION : getMinFrameDuration
3108 *
3109 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3110 *              of the currently configured streams and the current request configuration.
3111 *
3112 * PARAMETERS : @request: request sent by the framework
3113 *
3114 * RETURN     : min frame duration for a particular request
3115 *
3116 *==========================================================================*/
3117int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3118{
3119 bool hasJpegStream = false;
3120 bool hasRawStream = false;
3121 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3122 const camera3_stream_t *stream = request->output_buffers[i].stream;
3123 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3124 hasJpegStream = true;
3125 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3126 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3127 stream->format == HAL_PIXEL_FORMAT_RAW16)
3128 hasRawStream = true;
3129 }
3130
3131 if (!hasJpegStream)
3132 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3133 else
3134 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3135}
3136
3137/*===========================================================================
3138 * FUNCTION : handleBuffersDuringFlushLock
3139 *
3140 * DESCRIPTION: Account for buffers returned from back-end during flush
3141 * This function is executed while mMutex is held by the caller.
3142 *
3143 * PARAMETERS :
3144 * @buffer: image buffer for the callback
3145 *
3146 * RETURN :
3147 *==========================================================================*/
3148void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3149{
3150 bool buffer_found = false;
3151 for (List<PendingBuffersInRequest>::iterator req =
3152 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3153 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3154 for (List<PendingBufferInfo>::iterator i =
3155 req->mPendingBufferList.begin();
3156 i != req->mPendingBufferList.end(); i++) {
3157 if (i->buffer == buffer->buffer) {
3158 mPendingBuffersMap.numPendingBufsAtFlush--;
3159 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3160 buffer->buffer, req->frame_number,
3161 mPendingBuffersMap.numPendingBufsAtFlush);
3162 buffer_found = true;
3163 break;
3164 }
3165 }
3166 if (buffer_found) {
3167 break;
3168 }
3169 }
3170 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3171 //signal the flush()
3172 LOGD("All buffers returned to HAL. Continue flush");
3173 pthread_cond_signal(&mBuffersCond);
3174 }
3175}
3176
Thierry Strudel3d639192016-09-09 11:52:26 -07003177/*===========================================================================
3178 * FUNCTION : handleBatchMetadata
3179 *
3180 * DESCRIPTION: Handles metadata buffer callback in batch mode
3181 *
3182 * PARAMETERS : @metadata_buf: metadata buffer
3183 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3184 * the meta buf in this method
3185 *
3186 * RETURN :
3187 *
3188 *==========================================================================*/
3189void QCamera3HardwareInterface::handleBatchMetadata(
3190 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3191{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003192 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003193
3194 if (NULL == metadata_buf) {
3195 LOGE("metadata_buf is NULL");
3196 return;
3197 }
3198    /* In batch mode, the metadata will contain the frame number and timestamp of
3199     * the last frame in the batch. E.g. a batch containing buffers from requests
3200     * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3201     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3202     * multiple process_capture_results */
3203 metadata_buffer_t *metadata =
3204 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3205 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3206 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3207 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3208 uint32_t frame_number = 0, urgent_frame_number = 0;
3209 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3210 bool invalid_metadata = false;
3211 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3212 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003213 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003214
3215 int32_t *p_frame_number_valid =
3216 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3217 uint32_t *p_frame_number =
3218 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3219 int64_t *p_capture_time =
3220 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3221 int32_t *p_urgent_frame_number_valid =
3222 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3223 uint32_t *p_urgent_frame_number =
3224 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3225
3226 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3227 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3228 (NULL == p_urgent_frame_number)) {
3229 LOGE("Invalid metadata");
3230 invalid_metadata = true;
3231 } else {
3232 frame_number_valid = *p_frame_number_valid;
3233 last_frame_number = *p_frame_number;
3234 last_frame_capture_time = *p_capture_time;
3235 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3236 last_urgent_frame_number = *p_urgent_frame_number;
3237 }
3238
3239    /* In batch mode, when no video buffers are requested, set_parms are sent
3240     * for every capture_request. The difference between consecutive urgent
3241     * frame numbers and frame numbers should be used to interpolate the
3242     * corresponding frame numbers and timestamps */
3243 pthread_mutex_lock(&mMutex);
3244 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003245 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3246 if(idx < 0) {
3247 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3248 last_urgent_frame_number);
3249 mState = ERROR;
3250 pthread_mutex_unlock(&mMutex);
3251 return;
3252 }
3253 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003254 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3255 first_urgent_frame_number;
3256
3257 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3258 urgent_frame_number_valid,
3259 first_urgent_frame_number, last_urgent_frame_number);
3260 }
3261
3262 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003263 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3264 if(idx < 0) {
3265 LOGE("Invalid frame number received: %d. Irrecoverable error",
3266 last_frame_number);
3267 mState = ERROR;
3268 pthread_mutex_unlock(&mMutex);
3269 return;
3270 }
3271 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003272 frameNumDiff = last_frame_number + 1 -
3273 first_frame_number;
3274 mPendingBatchMap.removeItem(last_frame_number);
3275
3276 LOGD("frm: valid: %d frm_num: %d - %d",
3277 frame_number_valid,
3278 first_frame_number, last_frame_number);
3279
3280 }
3281 pthread_mutex_unlock(&mMutex);
3282
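    // The batch may carry different counts of urgent and regular metadata entries;
    // iterate over the larger of the two and flag batches exceeding MAX_HFR_BATCH_SIZE.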
3283 if (urgent_frame_number_valid || frame_number_valid) {
3284 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3285 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3286 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3287 urgentFrameNumDiff, last_urgent_frame_number);
3288 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3289 LOGE("frameNumDiff: %d frameNum: %d",
3290 frameNumDiff, last_frame_number);
3291 }
3292
3293 for (size_t i = 0; i < loopCount; i++) {
3294 /* handleMetadataWithLock is called even for invalid_metadata for
3295 * pipeline depth calculation */
3296 if (!invalid_metadata) {
3297 /* Infer frame number. Batch metadata contains frame number of the
3298 * last frame */
3299 if (urgent_frame_number_valid) {
3300 if (i < urgentFrameNumDiff) {
3301 urgent_frame_number =
3302 first_urgent_frame_number + i;
3303 LOGD("inferred urgent frame_number: %d",
3304 urgent_frame_number);
3305 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3306 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3307 } else {
3308 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3309 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3310 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3311 }
3312 }
3313
3314 /* Infer frame number. Batch metadata contains frame number of the
3315 * last frame */
3316 if (frame_number_valid) {
3317 if (i < frameNumDiff) {
3318 frame_number = first_frame_number + i;
3319 LOGD("inferred frame_number: %d", frame_number);
3320 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3321 CAM_INTF_META_FRAME_NUMBER, frame_number);
3322 } else {
3323 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3324 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3325 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3326 }
3327 }
3328
3329 if (last_frame_capture_time) {
3330 //Infer timestamp
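                // The batch timestamp belongs to the last frame; step back
                // (loopCount - 1) frame intervals to get the first frame's time,
                // then add i intervals for the i-th inferred frame.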
3331 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003332 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003333 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003334 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003335 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3336 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3337 LOGD("batch capture_time: %lld, capture_time: %lld",
3338 last_frame_capture_time, capture_time);
3339 }
3340 }
3341 pthread_mutex_lock(&mMutex);
3342 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003343 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003344 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3345 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003346                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003347 pthread_mutex_unlock(&mMutex);
3348 }
3349
3350 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003351 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 mMetadataChannel->bufDone(metadata_buf);
3353 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003354 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003355 }
3356}
3357
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003358void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3359 camera3_error_msg_code_t errorCode)
3360{
3361 camera3_notify_msg_t notify_msg;
3362 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3363 notify_msg.type = CAMERA3_MSG_ERROR;
3364 notify_msg.message.error.error_code = errorCode;
3365 notify_msg.message.error.error_stream = NULL;
3366 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003367 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003368
3369 return;
3370}
Thierry Strudel3d639192016-09-09 11:52:26 -07003371/*===========================================================================
3372 * FUNCTION : handleMetadataWithLock
3373 *
3374 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3375 *
3376 * PARAMETERS : @metadata_buf: metadata buffer
3377 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3378 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003379 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3380 * last urgent metadata in a batch. Always true for non-batch mode
3381 * @lastMetadataInBatch: Boolean to indicate whether this is the
3382 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003383 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3384 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003385 *
3386 * RETURN :
3387 *
3388 *==========================================================================*/
3389void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003390 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003391 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3392 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003393{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003394 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003395 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3396 //during flush do not send metadata from this thread
3397 LOGD("not sending metadata during flush or when mState is error");
3398 if (free_and_bufdone_meta_buf) {
3399 mMetadataChannel->bufDone(metadata_buf);
3400 free(metadata_buf);
3401 }
3402 return;
3403 }
3404
3405 //not in flush
3406 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3407 int32_t frame_number_valid, urgent_frame_number_valid;
3408 uint32_t frame_number, urgent_frame_number;
3409 int64_t capture_time;
3410 nsecs_t currentSysTime;
3411
3412 int32_t *p_frame_number_valid =
3413 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3414 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3415 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3416 int32_t *p_urgent_frame_number_valid =
3417 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3418 uint32_t *p_urgent_frame_number =
3419 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3420 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3421 metadata) {
3422 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3423 *p_frame_number_valid, *p_frame_number);
3424 }
3425
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003426 camera_metadata_t *resultMetadata = nullptr;
3427
Thierry Strudel3d639192016-09-09 11:52:26 -07003428 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3429 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3430 LOGE("Invalid metadata");
3431 if (free_and_bufdone_meta_buf) {
3432 mMetadataChannel->bufDone(metadata_buf);
3433 free(metadata_buf);
3434 }
3435 goto done_metadata;
3436 }
3437 frame_number_valid = *p_frame_number_valid;
3438 frame_number = *p_frame_number;
3439 capture_time = *p_capture_time;
3440 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3441 urgent_frame_number = *p_urgent_frame_number;
3442 currentSysTime = systemTime(CLOCK_MONOTONIC);
3443
3444 // Detect if buffers from any requests are overdue
3445 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003446 int64_t timeout;
3447 {
3448 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3449 // If there is a pending HDR+ request, the following requests may be blocked until the
3450 // HDR+ request is done. So allow a longer timeout.
3451 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3452 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3453 }
3454
3455 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003456 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003457 assert(missed.stream->priv);
3458 if (missed.stream->priv) {
3459 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3460 assert(ch->mStreams[0]);
3461 if (ch->mStreams[0]) {
3462 LOGE("Cancel missing frame = %d, buffer = %p,"
3463 "stream type = %d, stream format = %d",
3464 req.frame_number, missed.buffer,
3465 ch->mStreams[0]->getMyType(), missed.stream->format);
3466 ch->timeoutFrame(req.frame_number);
3467 }
3468 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003469 }
3470 }
3471 }
3472 //Partial result on process_capture_result for timestamp
3473 if (urgent_frame_number_valid) {
3474 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3475 urgent_frame_number, capture_time);
3476
3477        //Received an urgent frame number, handle it
3478 //using partial results
3479 for (pendingRequestIterator i =
3480 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3481 LOGD("Iterator Frame = %d urgent frame = %d",
3482 i->frame_number, urgent_frame_number);
3483
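            // An earlier pending request never received its urgent (3A) metadata;
            // count the missed partial result so that request can still complete.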
3484 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3485 (i->partial_result_cnt == 0)) {
3486 LOGE("Error: HAL missed urgent metadata for frame number %d",
3487 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003488 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003489 }
3490
3491 if (i->frame_number == urgent_frame_number &&
3492 i->bUrgentReceived == 0) {
3493
3494 camera3_capture_result_t result;
3495 memset(&result, 0, sizeof(camera3_capture_result_t));
3496
3497 i->partial_result_cnt++;
3498 i->bUrgentReceived = 1;
3499 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003500 result.result = translateCbUrgentMetadataToResultMetadata(
3501 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003502 // Populate metadata result
3503 result.frame_number = urgent_frame_number;
3504 result.num_output_buffers = 0;
3505 result.output_buffers = NULL;
3506 result.partial_result = i->partial_result_cnt;
3507
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003508 {
3509 Mutex::Autolock l(gHdrPlusClientLock);
3510 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3511 // Notify HDR+ client about the partial metadata.
3512 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3513 result.partial_result == PARTIAL_RESULT_COUNT);
3514 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003515 }
3516
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003517 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003518 LOGD("urgent frame_number = %u, capture_time = %lld",
3519 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003520 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3521 // Instant AEC settled for this frame.
3522 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3523 mInstantAECSettledFrameNumber = urgent_frame_number;
3524 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003525 free_camera_metadata((camera_metadata_t *)result.result);
3526 break;
3527 }
3528 }
3529 }
3530
3531 if (!frame_number_valid) {
3532 LOGD("Not a valid normal frame number, used as SOF only");
3533 if (free_and_bufdone_meta_buf) {
3534 mMetadataChannel->bufDone(metadata_buf);
3535 free(metadata_buf);
3536 }
3537 goto done_metadata;
3538 }
3539 LOGH("valid frame_number = %u, capture_time = %lld",
3540 frame_number, capture_time);
3541
Emilian Peev7650c122017-01-19 08:24:33 -08003542 if (metadata->is_depth_data_valid) {
3543 handleDepthDataLocked(metadata->depth_data, frame_number);
3544 }
3545
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003546    // Check whether any stream buffer corresponding to this frame is dropped or not.
3547    // If dropped, then send ERROR_BUFFER for the corresponding stream.
3548    // OR, if instant AEC is enabled, drop frames until AEC is settled.
3549 for (auto & pendingRequest : mPendingRequestsList) {
3550 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3551 mInstantAECSettledFrameNumber)) {
3552 camera3_notify_msg_t notify_msg = {};
3553 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003555 QCamera3ProcessingChannel *channel =
3556 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003557 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003558 if (p_cam_frame_drop) {
3559 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003560 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003561 // Got the stream ID for drop frame.
3562 dropFrame = true;
3563 break;
3564 }
3565 }
3566 } else {
3567 // This is instant AEC case.
3568                    // This is the instant AEC case.
3569                    // For instant AEC, drop the stream until AEC is settled.
3570 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003571
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003572 if (dropFrame) {
3573 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3574 if (p_cam_frame_drop) {
3575 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003576 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003577 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003578 } else {
3579 // For instant AEC, inform frame drop and frame number
3580 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3581 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 pendingRequest.frame_number, streamID,
3583 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003584 }
3585 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003586 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003587 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003588 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003589 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003590 if (p_cam_frame_drop) {
3591 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003592 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003593 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003594 } else {
3595 // For instant AEC, inform frame drop and frame number
3596 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3597 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003598 pendingRequest.frame_number, streamID,
3599 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003600 }
3601 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003602 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003603 PendingFrameDrop.stream_ID = streamID;
3604 // Add the Frame drop info to mPendingFrameDropList
3605 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003606 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003607 }
3608 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003609 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003610
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003611 for (auto & pendingRequest : mPendingRequestsList) {
3612 // Find the pending request with the frame number.
3613 if (pendingRequest.frame_number == frame_number) {
3614 // Update the sensor timestamp.
3615 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003616
Thierry Strudel3d639192016-09-09 11:52:26 -07003617
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003618            /* Set the timestamp in display metadata so that clients aware of
3619               private_handle, such as VT, can use these unmodified timestamps.
3620               The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003621 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003622
Thierry Strudel3d639192016-09-09 11:52:26 -07003623 // Find channel requiring metadata, meaning internal offline postprocess
3624 // is needed.
3625 //TODO: for now, we don't support two streams requiring metadata at the same time.
3626 // (because we are not making copies, and metadata buffer is not reference counted.
3627 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3629 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003630 if (iter->need_metadata) {
3631 internalPproc = true;
3632 QCamera3ProcessingChannel *channel =
3633 (QCamera3ProcessingChannel *)iter->stream->priv;
3634 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003635 if(p_is_metabuf_queued != NULL) {
3636 *p_is_metabuf_queued = true;
3637 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003638 break;
3639 }
3640 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003641 for (auto itr = pendingRequest.internalRequestList.begin();
3642 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003643 if (itr->need_metadata) {
3644 internalPproc = true;
3645 QCamera3ProcessingChannel *channel =
3646 (QCamera3ProcessingChannel *)itr->stream->priv;
3647 channel->queueReprocMetadata(metadata_buf);
3648 break;
3649 }
3650 }
3651
Thierry Strudel54dc9782017-02-15 12:12:10 -08003652 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003653
3654 bool *enableZsl = nullptr;
3655 if (gExposeEnableZslKey) {
3656 enableZsl = &pendingRequest.enableZsl;
3657 }
3658
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003659 resultMetadata = translateFromHalMetadata(metadata,
3660 pendingRequest.timestamp, pendingRequest.request_id,
3661 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3662 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003663 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003664 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003665 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003666 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003667 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003668 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003670 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003671
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003672 if (pendingRequest.blob_request) {
3673 //Dump tuning metadata if enabled and available
3674 char prop[PROPERTY_VALUE_MAX];
3675 memset(prop, 0, sizeof(prop));
3676 property_get("persist.camera.dumpmetadata", prop, "0");
3677 int32_t enabled = atoi(prop);
3678 if (enabled && metadata->is_tuning_params_valid) {
3679 dumpMetadataToFile(metadata->tuning_params,
3680 mMetaFrameCount,
3681 enabled,
3682 "Snapshot",
3683 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003684 }
3685 }
3686
3687 if (!internalPproc) {
3688 LOGD("couldn't find need_metadata for this metadata");
3689 // Return metadata buffer
3690 if (free_and_bufdone_meta_buf) {
3691 mMetadataChannel->bufDone(metadata_buf);
3692 free(metadata_buf);
3693 }
3694 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003695
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003696 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003697 }
3698 }
3699
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003700 // Try to send out shutter callbacks and capture results.
3701 handlePendingResultsWithLock(frame_number, resultMetadata);
3702 return;
3703
Thierry Strudel3d639192016-09-09 11:52:26 -07003704done_metadata:
3705 for (pendingRequestIterator i = mPendingRequestsList.begin();
3706 i != mPendingRequestsList.end() ;i++) {
3707 i->pipeline_depth++;
3708 }
3709 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3710 unblockRequestIfNecessary();
3711}
3712
3713/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003714 * FUNCTION   : handleDepthDataLocked
3715 *
3716 * DESCRIPTION: Handles incoming depth data
3717 *
3718 * PARAMETERS : @depthData : Depth data
3719 * @frameNumber: Frame number of the incoming depth data
3720 *
3721 * RETURN :
3722 *
3723 *==========================================================================*/
3724void QCamera3HardwareInterface::handleDepthDataLocked(
3725 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3726 uint32_t currentFrameNumber;
3727 buffer_handle_t *depthBuffer;
3728
3729 if (nullptr == mDepthChannel) {
3730 LOGE("Depth channel not present!");
3731 return;
3732 }
3733
3734 camera3_stream_buffer_t resultBuffer =
3735 {.acquire_fence = -1,
3736 .release_fence = -1,
3737 .status = CAMERA3_BUFFER_STATUS_OK,
3738 .buffer = nullptr,
3739 .stream = mDepthChannel->getStream()};
3740 camera3_capture_result_t result =
3741 {.result = nullptr,
3742 .num_output_buffers = 1,
3743 .output_buffers = &resultBuffer,
3744 .partial_result = 0,
3745 .frame_number = 0};
3746
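    // Return depth buffers in order: frames older than the incoming depth data are
    // flagged as errors, the matching frame is populated with the depth data, and
    // newer frames are left pending.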
3747 do {
3748 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3749 if (nullptr == depthBuffer) {
3750 break;
3751 }
3752
3753 result.frame_number = currentFrameNumber;
3754 resultBuffer.buffer = depthBuffer;
3755 if (currentFrameNumber == frameNumber) {
3756 int32_t rc = mDepthChannel->populateDepthData(depthData,
3757 frameNumber);
3758 if (NO_ERROR != rc) {
3759 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3760 } else {
3761 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3762 }
3763 } else if (currentFrameNumber > frameNumber) {
3764 break;
3765 } else {
3766 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3767 {{currentFrameNumber, mDepthChannel->getStream(),
3768 CAMERA3_MSG_ERROR_BUFFER}}};
3769 orchestrateNotify(&notify_msg);
3770
3771 LOGE("Depth buffer for frame number: %d is missing "
3772 "returning back!", currentFrameNumber);
3773 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3774 }
3775 mDepthChannel->unmapBuffer(currentFrameNumber);
3776
3777 orchestrateResult(&result);
3778 } while (currentFrameNumber < frameNumber);
3779}
3780
3781/*===========================================================================
3782 * FUNCTION : notifyErrorFoPendingDepthData
3783 *
3784 * DESCRIPTION: Returns error for any pending depth buffers
3785 *
3786 * PARAMETERS : depthCh - depth channel that needs to get flushed
3787 *
3788 * RETURN :
3789 *
3790 *==========================================================================*/
3791void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3792 QCamera3DepthChannel *depthCh) {
3793 uint32_t currentFrameNumber;
3794 buffer_handle_t *depthBuffer;
3795
3796 if (nullptr == depthCh) {
3797 return;
3798 }
3799
3800 camera3_notify_msg_t notify_msg =
3801 {.type = CAMERA3_MSG_ERROR,
3802 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3803 camera3_stream_buffer_t resultBuffer =
3804 {.acquire_fence = -1,
3805 .release_fence = -1,
3806 .buffer = nullptr,
3807 .stream = depthCh->getStream(),
3808 .status = CAMERA3_BUFFER_STATUS_ERROR};
3809 camera3_capture_result_t result =
3810 {.result = nullptr,
3811 .frame_number = 0,
3812 .num_output_buffers = 1,
3813 .partial_result = 0,
3814 .output_buffers = &resultBuffer};
3815
3816 while (nullptr !=
3817 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3818 depthCh->unmapBuffer(currentFrameNumber);
3819
3820 notify_msg.message.error.frame_number = currentFrameNumber;
3821 orchestrateNotify(&notify_msg);
3822
3823 resultBuffer.buffer = depthBuffer;
3824 result.frame_number = currentFrameNumber;
3825 orchestrateResult(&result);
3826 };
3827}
3828
3829/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003830 * FUNCTION : hdrPlusPerfLock
3831 *
3832 * DESCRIPTION: perf lock for HDR+ using custom intent
3833 *
3834 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3835 *
3836 * RETURN : None
3837 *
3838 *==========================================================================*/
3839void QCamera3HardwareInterface::hdrPlusPerfLock(
3840 mm_camera_super_buf_t *metadata_buf)
3841{
3842 if (NULL == metadata_buf) {
3843 LOGE("metadata_buf is NULL");
3844 return;
3845 }
3846 metadata_buffer_t *metadata =
3847 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3848 int32_t *p_frame_number_valid =
3849 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3850 uint32_t *p_frame_number =
3851 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3852
3853 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3854 LOGE("%s: Invalid metadata", __func__);
3855 return;
3856 }
3857
3858 //acquire perf lock for 5 sec after the last HDR frame is captured
3859 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3860 if ((p_frame_number != NULL) &&
3861 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003862 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003863 }
3864 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003865}
3866
3867/*===========================================================================
3868 * FUNCTION : handleInputBufferWithLock
3869 *
3870 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3871 *
3872 * PARAMETERS : @frame_number: frame number of the input buffer
3873 *
3874 * RETURN :
3875 *
3876 *==========================================================================*/
3877void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3878{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003879 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003880 pendingRequestIterator i = mPendingRequestsList.begin();
3881 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3882 i++;
3883 }
3884 if (i != mPendingRequestsList.end() && i->input_buffer) {
3885 //found the right request
3886 if (!i->shutter_notified) {
3887 CameraMetadata settings;
3888 camera3_notify_msg_t notify_msg;
3889 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3890 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3891 if(i->settings) {
3892 settings = i->settings;
3893 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3894 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3895 } else {
3896 LOGE("No timestamp in input settings! Using current one.");
3897 }
3898 } else {
3899 LOGE("Input settings missing!");
3900 }
3901
3902 notify_msg.type = CAMERA3_MSG_SHUTTER;
3903 notify_msg.message.shutter.frame_number = frame_number;
3904 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003905 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003906 i->shutter_notified = true;
3907 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3908 i->frame_number, notify_msg.message.shutter.timestamp);
3909 }
3910
3911 if (i->input_buffer->release_fence != -1) {
3912 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3913 close(i->input_buffer->release_fence);
3914 if (rc != OK) {
3915 LOGE("input buffer sync wait failed %d", rc);
3916 }
3917 }
3918
3919 camera3_capture_result result;
3920 memset(&result, 0, sizeof(camera3_capture_result));
3921 result.frame_number = frame_number;
3922 result.result = i->settings;
3923 result.input_buffer = i->input_buffer;
3924 result.partial_result = PARTIAL_RESULT_COUNT;
3925
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003926 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003927 LOGD("Input request metadata and input buffer frame_number = %u",
3928 i->frame_number);
3929 i = erasePendingRequest(i);
3930 } else {
3931 LOGE("Could not find input request for frame number %d", frame_number);
3932 }
3933}
3934
3935/*===========================================================================
3936 * FUNCTION : handleBufferWithLock
3937 *
3938 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3939 *
3940 * PARAMETERS : @buffer: image buffer for the callback
3941 * @frame_number: frame number of the image buffer
3942 *
3943 * RETURN :
3944 *
3945 *==========================================================================*/
3946void QCamera3HardwareInterface::handleBufferWithLock(
3947 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3948{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003949 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003950
3951 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3952 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3953 }
3954
Thierry Strudel3d639192016-09-09 11:52:26 -07003955 /* Nothing to be done during error state */
3956 if ((ERROR == mState) || (DEINIT == mState)) {
3957 return;
3958 }
3959 if (mFlushPerf) {
3960 handleBuffersDuringFlushLock(buffer);
3961 return;
3962 }
3963 //not in flush
3964 // If the frame number doesn't exist in the pending request list,
3965 // directly send the buffer to the frameworks, and update pending buffers map
3966 // Otherwise, book-keep the buffer.
3967 pendingRequestIterator i = mPendingRequestsList.begin();
3968 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3969 i++;
3970 }
3971 if (i == mPendingRequestsList.end()) {
3972 // Verify all pending requests frame_numbers are greater
3973 for (pendingRequestIterator j = mPendingRequestsList.begin();
3974 j != mPendingRequestsList.end(); j++) {
3975 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3976 LOGW("Error: pending live frame number %d is smaller than %d",
3977 j->frame_number, frame_number);
3978 }
3979 }
3980 camera3_capture_result_t result;
3981 memset(&result, 0, sizeof(camera3_capture_result_t));
3982 result.result = NULL;
3983 result.frame_number = frame_number;
3984 result.num_output_buffers = 1;
3985 result.partial_result = 0;
3986 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3987 m != mPendingFrameDropList.end(); m++) {
3988 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3989 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3990 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3991 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3992 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3993 frame_number, streamID);
3994 m = mPendingFrameDropList.erase(m);
3995 break;
3996 }
3997 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003998 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003999 result.output_buffers = buffer;
4000 LOGH("result frame_number = %d, buffer = %p",
4001 frame_number, buffer->buffer);
4002
4003 mPendingBuffersMap.removeBuf(buffer->buffer);
4004
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004005 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004006 } else {
4007 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004008 if (i->input_buffer->release_fence != -1) {
4009 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4010 close(i->input_buffer->release_fence);
4011 if (rc != OK) {
4012 LOGE("input buffer sync wait failed %d", rc);
4013 }
4014 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004016
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004017 // Put buffer into the pending request
4018 for (auto &requestedBuffer : i->buffers) {
4019 if (requestedBuffer.stream == buffer->stream) {
4020 if (requestedBuffer.buffer != nullptr) {
4021 LOGE("Error: buffer is already set");
4022 } else {
4023 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4024 sizeof(camera3_stream_buffer_t));
4025 *(requestedBuffer.buffer) = *buffer;
4026 LOGH("cache buffer %p at result frame_number %u",
4027 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004028 }
4029 }
4030 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004031
4032 if (i->input_buffer) {
4033 // For a reprocessing request, try to send out shutter callback and result metadata.
4034 handlePendingResultsWithLock(frame_number, nullptr);
4035 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004036 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004037
4038 if (mPreviewStarted == false) {
4039 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4040 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004041 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4042
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004043 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4044 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4045 mPreviewStarted = true;
4046
4047 // Set power hint for preview
4048 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4049 }
4050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004051}
4052
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004053void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4054 const camera_metadata_t *resultMetadata)
4055{
4056 // Find the pending request for this result metadata.
4057 auto requestIter = mPendingRequestsList.begin();
4058 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4059 requestIter++;
4060 }
4061
4062 if (requestIter == mPendingRequestsList.end()) {
4063 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4064 return;
4065 }
4066
4067 // Update the result metadata
4068 requestIter->resultMetadata = resultMetadata;
4069
4070 // Check what type of request this is.
4071 bool liveRequest = false;
4072 if (requestIter->hdrplus) {
4073 // HDR+ request doesn't have partial results.
4074 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4075 } else if (requestIter->input_buffer != nullptr) {
4076 // Reprocessing request result is the same as settings.
4077 requestIter->resultMetadata = requestIter->settings;
4078 // Reprocessing request doesn't have partial results.
4079 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4080 } else {
4081 liveRequest = true;
4082 requestIter->partial_result_cnt++;
4083 mPendingLiveRequest--;
4084
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004085 {
4086 Mutex::Autolock l(gHdrPlusClientLock);
4087 // For a live request, send the metadata to HDR+ client.
4088 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4089 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4090 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4091 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004092 }
4093 }
4094
4095 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4096 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4097 bool readyToSend = true;
4098
4099 // Iterate through the pending requests to send out shutter callbacks and results that are
4100 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4101 // live requests that don't have result metadata yet.
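    // Illustrative example (hypothetical frame numbers): with pending requests 10, 11 and 12, if
    // metadata for 11 arrives before 10, request 11 is held back (readyToSend turns false at 10)
    // until 10 completes, so shutter callbacks and results always go out in frame-number order.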
4102 auto iter = mPendingRequestsList.begin();
4103 while (iter != mPendingRequestsList.end()) {
4104 // Check if current pending request is ready. If it's not ready, the following pending
4105 // requests are also not ready.
4106 if (readyToSend && iter->resultMetadata == nullptr) {
4107 readyToSend = false;
4108 }
4109
4110 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4111
4112 std::vector<camera3_stream_buffer_t> outputBuffers;
4113
4114 camera3_capture_result_t result = {};
4115 result.frame_number = iter->frame_number;
4116 result.result = iter->resultMetadata;
4117 result.partial_result = iter->partial_result_cnt;
4118
4119 // If this pending buffer has result metadata, we may be able to send out shutter callback
4120 // and result metadata.
4121 if (iter->resultMetadata != nullptr) {
4122 if (!readyToSend) {
4123 // If any of the previous pending request is not ready, this pending request is
4124 // also not ready to send in order to keep shutter callbacks and result metadata
4125 // in order.
4126 iter++;
4127 continue;
4128 }
4129
4130 // Invoke shutter callback if not yet.
4131 if (!iter->shutter_notified) {
4132 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4133
4134 // Find the timestamp in HDR+ result metadata
4135 camera_metadata_ro_entry_t entry;
4136 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4137 ANDROID_SENSOR_TIMESTAMP, &entry);
4138 if (res != OK) {
4139 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4140 __FUNCTION__, iter->frame_number, strerror(-res), res);
4141 } else {
4142 timestamp = entry.data.i64[0];
4143 }
4144
4145 camera3_notify_msg_t notify_msg = {};
4146 notify_msg.type = CAMERA3_MSG_SHUTTER;
4147 notify_msg.message.shutter.frame_number = iter->frame_number;
4148 notify_msg.message.shutter.timestamp = timestamp;
4149 orchestrateNotify(&notify_msg);
4150 iter->shutter_notified = true;
4151 }
4152
4153 result.input_buffer = iter->input_buffer;
4154
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004155 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4156 // If the result metadata belongs to a live request, notify errors for previous pending
4157 // live requests.
4158 mPendingLiveRequest--;
4159
4160 CameraMetadata dummyMetadata;
4161 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4162 result.result = dummyMetadata.release();
4163
4164 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004165
4166             // partial_result should be PARTIAL_RESULT_COUNT in case of
4167 // ERROR_RESULT.
4168 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4169 result.partial_result = PARTIAL_RESULT_COUNT;
4170
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004171 } else {
4172 iter++;
4173 continue;
4174 }
4175
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004176 // Prepare output buffer array
4177 for (auto bufferInfoIter = iter->buffers.begin();
4178 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4179 if (bufferInfoIter->buffer != nullptr) {
4180
4181 QCamera3Channel *channel =
4182 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4183 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4184
4185 // Check if this buffer is a dropped frame.
4186 auto frameDropIter = mPendingFrameDropList.begin();
4187 while (frameDropIter != mPendingFrameDropList.end()) {
4188 if((frameDropIter->stream_ID == streamID) &&
4189 (frameDropIter->frame_number == frameNumber)) {
4190 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4191 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4192 streamID);
4193 mPendingFrameDropList.erase(frameDropIter);
4194 break;
4195 } else {
4196 frameDropIter++;
4197 }
4198 }
4199
4200 // Check buffer error status
4201 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4202 bufferInfoIter->buffer->buffer);
4203 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4204
4205 outputBuffers.push_back(*(bufferInfoIter->buffer));
4206 free(bufferInfoIter->buffer);
4207 bufferInfoIter->buffer = NULL;
4208 }
4209 }
4210
4211 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4212 result.num_output_buffers = outputBuffers.size();
4213
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004214 orchestrateResult(&result);
4215
4216 // For reprocessing, result metadata is the same as settings so do not free it here to
4217 // avoid double free.
4218 if (result.result != iter->settings) {
4219 free_camera_metadata((camera_metadata_t *)result.result);
4220 }
4221 iter->resultMetadata = nullptr;
4222 iter = erasePendingRequest(iter);
4223 }
4224
4225 if (liveRequest) {
4226 for (auto &iter : mPendingRequestsList) {
4227 // Increment pipeline depth for the following pending requests.
4228 if (iter.frame_number > frameNumber) {
4229 iter.pipeline_depth++;
4230 }
4231 }
4232 }
4233
4234 unblockRequestIfNecessary();
4235}
4236
Thierry Strudel3d639192016-09-09 11:52:26 -07004237/*===========================================================================
4238 * FUNCTION : unblockRequestIfNecessary
4239 *
4240 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4241 * that mMutex is held when this function is called.
4242 *
4243 * PARAMETERS :
4244 *
4245 * RETURN :
4246 *
4247 *==========================================================================*/
4248void QCamera3HardwareInterface::unblockRequestIfNecessary()
4249{
4250 // Unblock process_capture_request
4251 pthread_cond_signal(&mRequestCond);
4252}
4253
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004254/*===========================================================================
4255 * FUNCTION : isHdrSnapshotRequest
4256 *
4257 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4258 *
4259 * PARAMETERS : camera3 request structure
4260 *
4261 * RETURN : boolean decision variable
4262 *
4263 *==========================================================================*/
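// In short: a request qualifies as an HDR snapshot when either mForceHdrSnapshot is set, or its
// control mode is USE_SCENE_MODE with scene mode HDR, and in either case it carries at least one
// BLOB (JPEG) output buffer.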
4264bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4265{
4266 if (request == NULL) {
4267 LOGE("Invalid request handle");
4268 assert(0);
4269 return false;
4270 }
4271
4272 if (!mForceHdrSnapshot) {
4273 CameraMetadata frame_settings;
4274 frame_settings = request->settings;
4275
4276 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4277 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4278 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4279 return false;
4280 }
4281 } else {
4282 return false;
4283 }
4284
4285 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4286 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4287 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4288 return false;
4289 }
4290 } else {
4291 return false;
4292 }
4293 }
4294
4295 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4296 if (request->output_buffers[i].stream->format
4297 == HAL_PIXEL_FORMAT_BLOB) {
4298 return true;
4299 }
4300 }
4301
4302 return false;
4303}
4304/*===========================================================================
4305 * FUNCTION : orchestrateRequest
4306 *
4307 * DESCRIPTION: Orchestrates a capture request from camera service
4308 *
4309 * PARAMETERS :
4310 * @request : request from framework to process
4311 *
4312 * RETURN : Error status codes
4313 *
4314 *==========================================================================*/
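// Note on the HDR-snapshot path below: the single framework request is fanned out into several
// internal requests (AE-locked settling/metering frames plus captures at the exposure-compensation
// steps used below: GB_HDR_HALF_STEP_EV, 0 and GB_HDR_2X_STEP_EV). Only the request that carries the
// framework's original output buffers is mapped back to the framework frame number through
// _orchestrationDb; the remaining internal-only requests are dropped in orchestrateResult/Notify.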
4315int32_t QCamera3HardwareInterface::orchestrateRequest(
4316 camera3_capture_request_t *request)
4317{
4318
4319 uint32_t originalFrameNumber = request->frame_number;
4320 uint32_t originalOutputCount = request->num_output_buffers;
4321 const camera_metadata_t *original_settings = request->settings;
4322 List<InternalRequest> internallyRequestedStreams;
4323 List<InternalRequest> emptyInternalList;
4324
4325 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4326 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4327 uint32_t internalFrameNumber;
4328 CameraMetadata modified_meta;
4329
4330
4331 /* Add Blob channel to list of internally requested streams */
4332 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4333 if (request->output_buffers[i].stream->format
4334 == HAL_PIXEL_FORMAT_BLOB) {
4335 InternalRequest streamRequested;
4336 streamRequested.meteringOnly = 1;
4337 streamRequested.need_metadata = 0;
4338 streamRequested.stream = request->output_buffers[i].stream;
4339 internallyRequestedStreams.push_back(streamRequested);
4340 }
4341 }
4342 request->num_output_buffers = 0;
4343 auto itr = internallyRequestedStreams.begin();
4344
4345 /* Modify setting to set compensation */
4346 modified_meta = request->settings;
4347 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4348 uint8_t aeLock = 1;
4349 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4350 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4351 camera_metadata_t *modified_settings = modified_meta.release();
4352 request->settings = modified_settings;
4353
4354 /* Capture Settling & -2x frame */
4355 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4356 request->frame_number = internalFrameNumber;
4357 processCaptureRequest(request, internallyRequestedStreams);
4358
4359 request->num_output_buffers = originalOutputCount;
4360 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4361 request->frame_number = internalFrameNumber;
4362 processCaptureRequest(request, emptyInternalList);
4363 request->num_output_buffers = 0;
4364
4365 modified_meta = modified_settings;
4366 expCompensation = 0;
4367 aeLock = 1;
4368 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4369 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4370 modified_settings = modified_meta.release();
4371 request->settings = modified_settings;
4372
4373 /* Capture Settling & 0X frame */
4374
4375 itr = internallyRequestedStreams.begin();
4376 if (itr == internallyRequestedStreams.end()) {
4377 LOGE("Error Internally Requested Stream list is empty");
4378 assert(0);
4379 } else {
4380 itr->need_metadata = 0;
4381 itr->meteringOnly = 1;
4382 }
4383
4384 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4385 request->frame_number = internalFrameNumber;
4386 processCaptureRequest(request, internallyRequestedStreams);
4387
4388 itr = internallyRequestedStreams.begin();
4389 if (itr == internallyRequestedStreams.end()) {
4390 ALOGE("Error Internally Requested Stream list is empty");
4391 assert(0);
4392 } else {
4393 itr->need_metadata = 1;
4394 itr->meteringOnly = 0;
4395 }
4396
4397 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4398 request->frame_number = internalFrameNumber;
4399 processCaptureRequest(request, internallyRequestedStreams);
4400
4401 /* Capture 2X frame*/
4402 modified_meta = modified_settings;
4403 expCompensation = GB_HDR_2X_STEP_EV;
4404 aeLock = 1;
4405 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4406 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4407 modified_settings = modified_meta.release();
4408 request->settings = modified_settings;
4409
4410 itr = internallyRequestedStreams.begin();
4411 if (itr == internallyRequestedStreams.end()) {
4412 ALOGE("Error Internally Requested Stream list is empty");
4413 assert(0);
4414 } else {
4415 itr->need_metadata = 0;
4416 itr->meteringOnly = 1;
4417 }
4418 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4419 request->frame_number = internalFrameNumber;
4420 processCaptureRequest(request, internallyRequestedStreams);
4421
4422 itr = internallyRequestedStreams.begin();
4423 if (itr == internallyRequestedStreams.end()) {
4424 ALOGE("Error Internally Requested Stream list is empty");
4425 assert(0);
4426 } else {
4427 itr->need_metadata = 1;
4428 itr->meteringOnly = 0;
4429 }
4430
4431 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4432 request->frame_number = internalFrameNumber;
4433 processCaptureRequest(request, internallyRequestedStreams);
4434
4435
4436 /* Capture 2X on original streaming config*/
4437 internallyRequestedStreams.clear();
4438
4439 /* Restore original settings pointer */
4440 request->settings = original_settings;
4441 } else {
4442 uint32_t internalFrameNumber;
4443 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4444 request->frame_number = internalFrameNumber;
4445 return processCaptureRequest(request, internallyRequestedStreams);
4446 }
4447
4448 return NO_ERROR;
4449}
4450
4451/*===========================================================================
4452 * FUNCTION : orchestrateResult
4453 *
4454 * DESCRIPTION: Orchestrates a capture result to camera service
4455 *
4456 * PARAMETERS :
4457 * @result : capture result to be sent to the framework
4458 *
4459 * RETURN :
4460 *
4461 *==========================================================================*/
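// Translates the result's internal frame number back to the framework frame number. Results that
// belong to internal-only requests (mapped to EMPTY_FRAMEWORK_FRAME_NUMBER) are dropped instead of
// being forwarded to the framework.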
4462void QCamera3HardwareInterface::orchestrateResult(
4463 camera3_capture_result_t *result)
4464{
4465 uint32_t frameworkFrameNumber;
4466 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4467 frameworkFrameNumber);
4468 if (rc != NO_ERROR) {
4469 LOGE("Cannot find translated frameworkFrameNumber");
4470 assert(0);
4471 } else {
4472 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004473 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004474 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004475 if (result->result != NULL) {
4476 CameraMetadata metadata;
4477 metadata.acquire((camera_metadata_t *)result->result);
4478 if (metadata.exists(ANDROID_SYNC_FRAME_NUMBER)) {
4479 int64_t sync_frame_number = frameworkFrameNumber;
4480 metadata.update(ANDROID_SYNC_FRAME_NUMBER, &sync_frame_number, 1);
4481 }
4482 result->result = metadata.release();
4483 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004484 result->frame_number = frameworkFrameNumber;
4485 mCallbackOps->process_capture_result(mCallbackOps, result);
4486 }
4487 }
4488}
4489
4490/*===========================================================================
4491 * FUNCTION : orchestrateNotify
4492 *
4493 * DESCRIPTION: Orchestrates a notify to camera service
4494 *
4495 * PARAMETERS :
4496 * @notify_msg : notify message to be sent to the framework
4497 *
4498 * RETURN :
4499 *
4500 *==========================================================================*/
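// Performs the same frame-number translation as orchestrateResult: notifications for internal-only
// requests are dropped, while a CAMERA3_MSG_ERROR_DEVICE notification is handled specially and has
// its frame number forced to 0 when no framework mapping exists for the internal frame number.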
4501void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4502{
4503 uint32_t frameworkFrameNumber;
4504 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004505 int32_t rc = NO_ERROR;
4506
4507 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004508 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004509
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004510 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004511 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4512 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4513 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004514 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004515 LOGE("Cannot find translated frameworkFrameNumber");
4516 assert(0);
4517 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004518 }
4519 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004520
4521 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4522 LOGD("Internal Request drop the notifyCb");
4523 } else {
4524 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4525 mCallbackOps->notify(mCallbackOps, notify_msg);
4526 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004527}
4528
4529/*===========================================================================
4530 * FUNCTION : FrameNumberRegistry
4531 *
4532 * DESCRIPTION: Constructor
4533 *
4534 * PARAMETERS :
4535 *
4536 * RETURN :
4537 *
4538 *==========================================================================*/
4539FrameNumberRegistry::FrameNumberRegistry()
4540{
4541 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4542}
4543
4544/*===========================================================================
4545 * FUNCTION : ~FrameNumberRegistry
4546 *
4547 * DESCRIPTION: Destructor
4548 *
4549 * PARAMETERS :
4550 *
4551 * RETURN :
4552 *
4553 *==========================================================================*/
4554FrameNumberRegistry::~FrameNumberRegistry()
4555{
4556}
4557
4558/*===========================================================================
4559 * FUNCTION : PurgeOldEntriesLocked
4560 *
4561 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4562 *
4563 * PARAMETERS :
4564 *
4565 * RETURN : NONE
4566 *
4567 *==========================================================================*/
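// Entries that fall more than FRAME_REGISTER_LRU_SIZE internal frame numbers behind
// _nextFreeInternalNumber are evicted, so only recent mappings are retained.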
4568void FrameNumberRegistry::purgeOldEntriesLocked()
4569{
4570 while (_register.begin() != _register.end()) {
4571 auto itr = _register.begin();
4572 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4573 _register.erase(itr);
4574 } else {
4575 return;
4576 }
4577 }
4578}
4579
4580/*===========================================================================
4581 * FUNCTION : allocStoreInternalFrameNumber
4582 *
4583 * DESCRIPTION: Method to note down a framework request and associate a new
4584 * internal request number against it
4585 *
4586 * PARAMETERS :
4587 * @frameworkFrameNumber: Frame number identifier given by the framework
4588 * @internalFrameNumber : Output parameter that receives the newly generated
4589 *                        internal frame number
4590 *
4591 * RETURN : Error code
4592 *
4593 *==========================================================================*/
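// Illustrative example (hypothetical numbers): framework frame 100 might be stored against internal
// frame 1000; a later getFrameworkFrameNumber(1000, out) then yields 100 so results and notifies
// can be translated back before reaching the framework.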
4594int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4595 uint32_t &internalFrameNumber)
4596{
4597 Mutex::Autolock lock(mRegistryLock);
4598 internalFrameNumber = _nextFreeInternalNumber++;
4599 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4600 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4601 purgeOldEntriesLocked();
4602 return NO_ERROR;
4603}
4604
4605/*===========================================================================
4606 * FUNCTION : generateStoreInternalFrameNumber
4607 *
4608 * DESCRIPTION: Method to associate a new internal request number independent
4609 * of any association with a framework request
4610 *
4611 * PARAMETERS :
4612 * @internalFrameNumber: Output parameter that receives the newly generated
4613 *                       internal frame number
4614 *
4615 * RETURN : Error code
4616 *
4617 *==========================================================================*/
4618int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4619{
4620 Mutex::Autolock lock(mRegistryLock);
4621 internalFrameNumber = _nextFreeInternalNumber++;
4622 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4623 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4624 purgeOldEntriesLocked();
4625 return NO_ERROR;
4626}
4627
4628/*===========================================================================
4629 * FUNCTION : getFrameworkFrameNumber
4630 *
4631 * DESCRIPTION: Method to query the framework framenumber given an internal #
4632 *
4633 * PARAMETERS :
4634 * @internalFrameNumber : Internal frame number to look up
4635 * @frameworkFrameNumber: Output parameter holding the matching framework frame number
4636 *
4637 * RETURN : Error code
4638 *
4639 *==========================================================================*/
4640int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4641 uint32_t &frameworkFrameNumber)
4642{
4643 Mutex::Autolock lock(mRegistryLock);
4644 auto itr = _register.find(internalFrameNumber);
4645 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004646 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004647 return -ENOENT;
4648 }
4649
4650 frameworkFrameNumber = itr->second;
4651 purgeOldEntriesLocked();
4652 return NO_ERROR;
4653}
Thierry Strudel3d639192016-09-09 11:52:26 -07004654
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004655status_t QCamera3HardwareInterface::fillPbStreamConfig(
4656 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4657 QCamera3Channel *channel, uint32_t streamIndex) {
4658 if (config == nullptr) {
4659 LOGE("%s: config is null", __FUNCTION__);
4660 return BAD_VALUE;
4661 }
4662
4663 if (channel == nullptr) {
4664 LOGE("%s: channel is null", __FUNCTION__);
4665 return BAD_VALUE;
4666 }
4667
4668 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4669 if (stream == nullptr) {
4670 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4671 return NAME_NOT_FOUND;
4672 }
4673
4674 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4675 if (streamInfo == nullptr) {
4676 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4677 return NAME_NOT_FOUND;
4678 }
4679
4680 config->id = pbStreamId;
4681 config->image.width = streamInfo->dim.width;
4682 config->image.height = streamInfo->dim.height;
4683 config->image.padding = 0;
4684 config->image.format = pbStreamFormat;
4685
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004686 uint32_t totalPlaneSize = 0;
4687
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004688 // Fill plane information.
4689 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4690 pbcamera::PlaneConfiguration plane;
4691 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4692 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4693 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004694
4695 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004696 }
4697
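    // Report any bytes in the frame beyond the summed plane sizes (stride * scanline) as padding,
    // so that planes plus padding add up to the backend's frame_len.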
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004698 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004699 return OK;
4700}
4701
Thierry Strudel3d639192016-09-09 11:52:26 -07004702/*===========================================================================
4703 * FUNCTION : processCaptureRequest
4704 *
4705 * DESCRIPTION: process a capture request from camera service
4706 *
4707 * PARAMETERS :
4708 * @request : request from framework to process
4709 *
4710 * RETURN :
4711 *
4712 *==========================================================================*/
4713int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004714 camera3_capture_request_t *request,
4715 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004716{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004717 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004718 int rc = NO_ERROR;
4719 int32_t request_id;
4720 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004721 bool isVidBufRequested = false;
4722 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004723 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004724
4725 pthread_mutex_lock(&mMutex);
4726
4727 // Validate current state
4728 switch (mState) {
4729 case CONFIGURED:
4730 case STARTED:
4731 /* valid state */
4732 break;
4733
4734 case ERROR:
4735 pthread_mutex_unlock(&mMutex);
4736 handleCameraDeviceError();
4737 return -ENODEV;
4738
4739 default:
4740 LOGE("Invalid state %d", mState);
4741 pthread_mutex_unlock(&mMutex);
4742 return -ENODEV;
4743 }
4744
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004745 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004746 if (rc != NO_ERROR) {
4747 LOGE("incoming request is not valid");
4748 pthread_mutex_unlock(&mMutex);
4749 return rc;
4750 }
4751
4752 meta = request->settings;
4753
4754 // For first capture request, send capture intent, and
4755 // stream on all streams
4756 if (mState == CONFIGURED) {
4757 // send an unconfigure to the backend so that the isp
4758 // resources are deallocated
4759 if (!mFirstConfiguration) {
4760 cam_stream_size_info_t stream_config_info;
4761 int32_t hal_version = CAM_HAL_V3;
4762 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4763 stream_config_info.buffer_info.min_buffers =
4764 MIN_INFLIGHT_REQUESTS;
4765 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004766 m_bIs4KVideo ? 0 :
4767 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004768 clear_metadata_buffer(mParameters);
4769 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4770 CAM_INTF_PARM_HAL_VERSION, hal_version);
4771 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4772 CAM_INTF_META_STREAM_INFO, stream_config_info);
4773 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4774 mParameters);
4775 if (rc < 0) {
4776 LOGE("set_parms for unconfigure failed");
4777 pthread_mutex_unlock(&mMutex);
4778 return rc;
4779 }
4780 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004781 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004782 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004783 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004784 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004785 property_get("persist.camera.is_type", is_type_value, "4");
4786 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4787 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4788 property_get("persist.camera.is_type_preview", is_type_value, "4");
4789 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4790 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004791
4792 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4793 int32_t hal_version = CAM_HAL_V3;
4794 uint8_t captureIntent =
4795 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4796 mCaptureIntent = captureIntent;
4797 clear_metadata_buffer(mParameters);
4798 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4799 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4800 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004801 if (mFirstConfiguration) {
4802 // configure instant AEC
4803 // Instant AEC is a session based parameter and it is needed only
4804 // once per complete session after open camera.
4805 // i.e. This is set only once for the first capture request, after open camera.
4806 setInstantAEC(meta);
4807 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004808 uint8_t fwkVideoStabMode=0;
4809 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4810 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4811 }
4812
Xue Tuecac74e2017-04-17 13:58:15 -07004813 // If EIS setprop is enabled then only turn it on for video/preview
4814 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004815 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004816 int32_t vsMode;
4817 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4818 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4819 rc = BAD_VALUE;
4820 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004821 LOGD("setEis %d", setEis);
4822 bool eis3Supported = false;
4823 size_t count = IS_TYPE_MAX;
4824 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4825 for (size_t i = 0; i < count; i++) {
4826 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4827 eis3Supported = true;
4828 break;
4829 }
4830 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004831
4832        //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004833        //it could either be IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004834 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4835 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004836 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4837 is_type = isTypePreview;
4838 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4839 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4840 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004842 } else {
4843 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004845 } else {
4846 is_type = IS_TYPE_NONE;
4847 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004848 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004849 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004850 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4851 }
4852 }
4853
4854 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4855 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4856
Thierry Strudel54dc9782017-02-15 12:12:10 -08004857 //Disable tintless only if the property is set to 0
4858 memset(prop, 0, sizeof(prop));
4859 property_get("persist.camera.tintless.enable", prop, "1");
4860 int32_t tintless_value = atoi(prop);
4861
Thierry Strudel3d639192016-09-09 11:52:26 -07004862 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4863 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004864
Thierry Strudel3d639192016-09-09 11:52:26 -07004865 //Disable CDS for HFR mode or if DIS/EIS is on.
4866 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4867 //after every configure_stream
4868 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4869 (m_bIsVideo)) {
4870 int32_t cds = CAM_CDS_MODE_OFF;
4871 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4872 CAM_INTF_PARM_CDS_MODE, cds))
4873 LOGE("Failed to disable CDS for HFR mode");
4874
4875 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004876
4877 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4878 uint8_t* use_av_timer = NULL;
4879
4880 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004881 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004882 use_av_timer = &m_debug_avtimer;
4883 }
4884 else{
4885 use_av_timer =
4886 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004887 if (use_av_timer) {
4888 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4889 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004890 }
4891
4892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4893 rc = BAD_VALUE;
4894 }
4895 }
4896
Thierry Strudel3d639192016-09-09 11:52:26 -07004897 setMobicat();
4898
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004899 uint8_t nrMode = 0;
4900 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4901 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4902 }
4903
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 /* Set fps and hfr mode while sending meta stream info so that sensor
4905 * can configure appropriate streaming mode */
4906 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004907 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4908 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004909 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4910 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004911 if (rc == NO_ERROR) {
4912 int32_t max_fps =
4913 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004914 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4916 }
4917 /* For HFR, more buffers are dequeued upfront to improve the performance */
4918 if (mBatchSize) {
4919 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4920 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4921 }
4922 }
4923 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 LOGE("setHalFpsRange failed");
4925 }
4926 }
4927 if (meta.exists(ANDROID_CONTROL_MODE)) {
4928 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4929 rc = extractSceneMode(meta, metaMode, mParameters);
4930 if (rc != NO_ERROR) {
4931 LOGE("extractSceneMode failed");
4932 }
4933 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004934 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004935
Thierry Strudel04e026f2016-10-10 11:27:36 -07004936 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4937 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4938 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4939 rc = setVideoHdrMode(mParameters, vhdr);
4940 if (rc != NO_ERROR) {
4941 LOGE("setVideoHDR is failed");
4942 }
4943 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004944
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004945 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004946 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004947 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004948 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4950 sensorModeFullFov)) {
4951 rc = BAD_VALUE;
4952 }
4953 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004954 //TODO: validate the arguments, HSV scenemode should have only the
4955 //advertised fps ranges
4956
4957 /*set the capture intent, hal version, tintless, stream info,
4958     *and DIS enable parameters to the backend*/
4959 LOGD("set_parms META_STREAM_INFO " );
4960 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004961 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4962 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 mStreamConfigInfo.type[i],
4964 mStreamConfigInfo.stream_sizes[i].width,
4965 mStreamConfigInfo.stream_sizes[i].height,
4966 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004967 mStreamConfigInfo.format[i],
4968 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004969 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004970
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4972 mParameters);
4973 if (rc < 0) {
4974 LOGE("set_parms failed for hal version, stream info");
4975 }
4976
Chien-Yu Chenee335912017-02-09 17:53:20 -08004977 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4978 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004979 if (rc != NO_ERROR) {
4980 LOGE("Failed to get sensor output size");
4981 pthread_mutex_unlock(&mMutex);
4982 goto error_exit;
4983 }
4984
4985 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4986 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004987 mSensorModeInfo.active_array_size.width,
4988 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004989
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004990 {
4991 Mutex::Autolock l(gHdrPlusClientLock);
4992 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004993 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004994 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4995 if (rc != OK) {
4996 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4997 mCameraId, mSensorModeInfo.op_pixel_clk);
4998 pthread_mutex_unlock(&mMutex);
4999 goto error_exit;
5000 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005001 }
5002 }
5003
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 /* Set batchmode before initializing channel. Since registerBuffer
5005 * internally initializes some of the channels, better set batchmode
5006 * even before first register buffer */
5007 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5008 it != mStreamInfo.end(); it++) {
5009 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5010 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5011 && mBatchSize) {
5012 rc = channel->setBatchSize(mBatchSize);
5013 //Disable per frame map unmap for HFR/batchmode case
5014 rc |= channel->setPerFrameMapUnmap(false);
5015 if (NO_ERROR != rc) {
5016 LOGE("Channel init failed %d", rc);
5017 pthread_mutex_unlock(&mMutex);
5018 goto error_exit;
5019 }
5020 }
5021 }
5022
5023 //First initialize all streams
5024 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5025 it != mStreamInfo.end(); it++) {
5026 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005027
5028 /* Initial value of NR mode is needed before stream on */
5029 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005030 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5031 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 setEis) {
5033 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5034 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5035 is_type = mStreamConfigInfo.is_type[i];
5036 break;
5037 }
5038 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005039 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005040 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005041 rc = channel->initialize(IS_TYPE_NONE);
5042 }
5043 if (NO_ERROR != rc) {
5044 LOGE("Channel initialization failed %d", rc);
5045 pthread_mutex_unlock(&mMutex);
5046 goto error_exit;
5047 }
5048 }
5049
5050 if (mRawDumpChannel) {
5051 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5052 if (rc != NO_ERROR) {
5053 LOGE("Error: Raw Dump Channel init failed");
5054 pthread_mutex_unlock(&mMutex);
5055 goto error_exit;
5056 }
5057 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005058 if (mHdrPlusRawSrcChannel) {
5059 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5060 if (rc != NO_ERROR) {
5061 LOGE("Error: HDR+ RAW Source Channel init failed");
5062 pthread_mutex_unlock(&mMutex);
5063 goto error_exit;
5064 }
5065 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 if (mSupportChannel) {
5067 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5068 if (rc < 0) {
5069 LOGE("Support channel initialization failed");
5070 pthread_mutex_unlock(&mMutex);
5071 goto error_exit;
5072 }
5073 }
5074 if (mAnalysisChannel) {
5075 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5076 if (rc < 0) {
5077 LOGE("Analysis channel initialization failed");
5078 pthread_mutex_unlock(&mMutex);
5079 goto error_exit;
5080 }
5081 }
5082 if (mDummyBatchChannel) {
5083 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5084 if (rc < 0) {
5085 LOGE("mDummyBatchChannel setBatchSize failed");
5086 pthread_mutex_unlock(&mMutex);
5087 goto error_exit;
5088 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005089 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005090 if (rc < 0) {
5091 LOGE("mDummyBatchChannel initialization failed");
5092 pthread_mutex_unlock(&mMutex);
5093 goto error_exit;
5094 }
5095 }
5096
5097 // Set bundle info
5098 rc = setBundleInfo();
5099 if (rc < 0) {
5100 LOGE("setBundleInfo failed %d", rc);
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104
5105 //update settings from app here
5106 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5107 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5108 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5109 }
5110 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5111 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5112 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5113 }
5114 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5115 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5116 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5117
5118 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5119 (mLinkedCameraId != mCameraId) ) {
5120 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5121 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005122 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 goto error_exit;
5124 }
5125 }
5126
5127 // add bundle related cameras
5128 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5129 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005130 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5131 &m_pDualCamCmdPtr->bundle_info;
5132 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005133 if (mIsDeviceLinked)
5134 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5135 else
5136 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5137
5138 pthread_mutex_lock(&gCamLock);
5139
5140 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5141 LOGE("Dualcam: Invalid Session Id ");
5142 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005143 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005144 goto error_exit;
5145 }
5146
5147 if (mIsMainCamera == 1) {
5148 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5149 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005150 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005151 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005152 // related session id should be session id of linked session
5153 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5154 } else {
5155 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5156 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005157 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005158 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005159 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5160 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005161 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005162 pthread_mutex_unlock(&gCamLock);
5163
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005164 rc = mCameraHandle->ops->set_dual_cam_cmd(
5165 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005166 if (rc < 0) {
5167 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005168 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 goto error_exit;
5170 }
5171 }
5172
5173 //Then start them.
5174 LOGH("Start META Channel");
5175 rc = mMetadataChannel->start();
5176 if (rc < 0) {
5177 LOGE("META channel start failed");
5178 pthread_mutex_unlock(&mMutex);
5179 goto error_exit;
5180 }
5181
5182 if (mAnalysisChannel) {
5183 rc = mAnalysisChannel->start();
5184 if (rc < 0) {
5185 LOGE("Analysis channel start failed");
5186 mMetadataChannel->stop();
5187 pthread_mutex_unlock(&mMutex);
5188 goto error_exit;
5189 }
5190 }
5191
5192 if (mSupportChannel) {
5193 rc = mSupportChannel->start();
5194 if (rc < 0) {
5195 LOGE("Support channel start failed");
5196 mMetadataChannel->stop();
5197 /* Although support and analysis are mutually exclusive today
5198                adding it in any case for future-proofing */
5199 if (mAnalysisChannel) {
5200 mAnalysisChannel->stop();
5201 }
5202 pthread_mutex_unlock(&mMutex);
5203 goto error_exit;
5204 }
5205 }
5206 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5207 it != mStreamInfo.end(); it++) {
5208 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5209 LOGH("Start Processing Channel mask=%d",
5210 channel->getStreamTypeMask());
5211 rc = channel->start();
5212 if (rc < 0) {
5213 LOGE("channel start failed");
5214 pthread_mutex_unlock(&mMutex);
5215 goto error_exit;
5216 }
5217 }
5218
5219 if (mRawDumpChannel) {
5220 LOGD("Starting raw dump stream");
5221 rc = mRawDumpChannel->start();
5222 if (rc != NO_ERROR) {
5223 LOGE("Error Starting Raw Dump Channel");
5224 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5225 it != mStreamInfo.end(); it++) {
5226 QCamera3Channel *channel =
5227 (QCamera3Channel *)(*it)->stream->priv;
5228 LOGH("Stopping Processing Channel mask=%d",
5229 channel->getStreamTypeMask());
5230 channel->stop();
5231 }
5232 if (mSupportChannel)
5233 mSupportChannel->stop();
5234 if (mAnalysisChannel) {
5235 mAnalysisChannel->stop();
5236 }
5237 mMetadataChannel->stop();
5238 pthread_mutex_unlock(&mMutex);
5239 goto error_exit;
5240 }
5241 }
5242
5243 if (mChannelHandle) {
5244
5245 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5246 mChannelHandle);
5247 if (rc != NO_ERROR) {
5248 LOGE("start_channel failed %d", rc);
5249 pthread_mutex_unlock(&mMutex);
5250 goto error_exit;
5251 }
5252 }
5253
5254 goto no_error;
5255error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005256 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005257 return rc;
5258no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 mWokenUpByDaemon = false;
5260 mPendingLiveRequest = 0;
5261 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 }
5263
Chien-Yu Chenee335912017-02-09 17:53:20 -08005264 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen63fc73b2017-04-26 16:43:28 -07005265 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005266 Mutex::Autolock l(gHdrPlusClientLock);
5267 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5268 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5269 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5270 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5271 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5272 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005273 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005274 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005275 pthread_mutex_unlock(&mMutex);
5276 return rc;
5277 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005278
5279 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005280 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005281 }
5282
Thierry Strudel3d639192016-09-09 11:52:26 -07005283 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005284 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005285
5286 if (mFlushPerf) {
5287 //we cannot accept any requests during flush
5288 LOGE("process_capture_request cannot proceed during flush");
5289 pthread_mutex_unlock(&mMutex);
5290 return NO_ERROR; //should return an error
5291 }
5292
5293 if (meta.exists(ANDROID_REQUEST_ID)) {
5294 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5295 mCurrentRequestId = request_id;
5296 LOGD("Received request with id: %d", request_id);
5297 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5298 LOGE("Unable to find request id field, \
5299 & no previous id available");
5300 pthread_mutex_unlock(&mMutex);
5301 return NAME_NOT_FOUND;
5302 } else {
5303 LOGD("Re-using old request id");
5304 request_id = mCurrentRequestId;
5305 }
5306
5307 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5308 request->num_output_buffers,
5309 request->input_buffer,
5310 frameNumber);
5311 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005312 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005314 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 uint32_t snapshotStreamId = 0;
5316 for (size_t i = 0; i < request->num_output_buffers; i++) {
5317 const camera3_stream_buffer_t& output = request->output_buffers[i];
5318 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5319
Emilian Peev7650c122017-01-19 08:24:33 -08005320 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5321 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005322 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005323 blob_request = 1;
5324 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5325 }
5326
5327 if (output.acquire_fence != -1) {
5328 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5329 close(output.acquire_fence);
5330 if (rc != OK) {
5331 LOGE("sync wait failed %d", rc);
5332 pthread_mutex_unlock(&mMutex);
5333 return rc;
5334 }
5335 }
5336
Emilian Peev0f3c3162017-03-15 12:57:46 +00005337 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5338 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005339 depthRequestPresent = true;
5340 continue;
5341 }
5342
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005343 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005344 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005345
5346 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5347 isVidBufRequested = true;
5348 }
5349 }
5350
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005351 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5352 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5353 itr++) {
5354 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5355 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5356 channel->getStreamID(channel->getStreamTypeMask());
5357
5358 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5359 isVidBufRequested = true;
5360 }
5361 }
5362
Thierry Strudel3d639192016-09-09 11:52:26 -07005363 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005364 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005365 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005366 }
5367 if (blob_request && mRawDumpChannel) {
5368 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005369 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005370 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005371 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005372 }
5373
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005374 {
5375 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5376 // Request a RAW buffer if
5377 // 1. mHdrPlusRawSrcChannel is valid.
5378 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5379 // 3. There is no pending HDR+ request.
5380 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5381 mHdrPlusPendingRequests.size() == 0) {
5382 streamsArray.stream_request[streamsArray.num_streams].streamID =
5383 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5384 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5385 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005386 }
5387
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005388 //extract capture intent
5389 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5390 mCaptureIntent =
5391 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5392 }
5393
5394 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5395 mCacMode =
5396 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5397 }
5398
5399 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005400 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005401
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005402 {
5403 Mutex::Autolock l(gHdrPlusClientLock);
5404 // If this request has a still capture intent, try to submit an HDR+ request.
5405 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5406 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5407 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5408 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005409 }
5410
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005411 if (hdrPlusRequest) {
5412 // For a HDR+ request, just set the frame parameters.
5413 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5414 if (rc < 0) {
5415 LOGE("fail to set frame parameters");
5416 pthread_mutex_unlock(&mMutex);
5417 return rc;
5418 }
5419 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 /* Parse the settings:
5421 * - For every request in NORMAL MODE
5422 * - For every request in HFR mode during preview only case
5423 * - For first request of every batch in HFR mode during video
5424 * recording. In batchmode the same settings except frame number is
5425 * repeated in each request of the batch.
5426 */
5427 if (!mBatchSize ||
5428 (mBatchSize && !isVidBufRequested) ||
5429 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005430 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005431 if (rc < 0) {
5432 LOGE("fail to set frame parameters");
5433 pthread_mutex_unlock(&mMutex);
5434 return rc;
5435 }
5436 }
5437 /* For batchMode HFR, setFrameParameters is not called for every
5438 * request. But only frame number of the latest request is parsed.
5439 * Keep track of first and last frame numbers in a batch so that
5440 * metadata for the frame numbers of batch can be duplicated in
5441 * handleBatchMetadata */
5442 if (mBatchSize) {
5443 if (!mToBeQueuedVidBufs) {
5444 //start of the batch
5445 mFirstFrameNumberInBatch = request->frame_number;
5446 }
5447 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5448 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5449 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005450 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005451 return BAD_VALUE;
5452 }
5453 }
5454 if (mNeedSensorRestart) {
5455 /* Unlock the mutex as restartSensor waits on the channels to be
5456 * stopped, which in turn calls stream callback functions -
5457 * handleBufferWithLock and handleMetadataWithLock */
5458 pthread_mutex_unlock(&mMutex);
5459 rc = dynamicUpdateMetaStreamInfo();
5460 if (rc != NO_ERROR) {
5461 LOGE("Restarting the sensor failed");
5462 return BAD_VALUE;
5463 }
5464 mNeedSensorRestart = false;
5465 pthread_mutex_lock(&mMutex);
5466 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005467 if(mResetInstantAEC) {
5468 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5469 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5470 mResetInstantAEC = false;
5471 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005472 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005473 if (request->input_buffer->acquire_fence != -1) {
5474 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5475 close(request->input_buffer->acquire_fence);
5476 if (rc != OK) {
5477 LOGE("input buffer sync wait failed %d", rc);
5478 pthread_mutex_unlock(&mMutex);
5479 return rc;
5480 }
5481 }
5482 }
5483
5484 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5485 mLastCustIntentFrmNum = frameNumber;
5486 }
5487 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005488 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 pendingRequestIterator latestRequest;
5490 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005491 pendingRequest.num_buffers = depthRequestPresent ?
5492 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005493 pendingRequest.request_id = request_id;
5494 pendingRequest.blob_request = blob_request;
5495 pendingRequest.timestamp = 0;
5496 pendingRequest.bUrgentReceived = 0;
5497 if (request->input_buffer) {
5498 pendingRequest.input_buffer =
5499 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5500 *(pendingRequest.input_buffer) = *(request->input_buffer);
5501 pInputBuffer = pendingRequest.input_buffer;
5502 } else {
5503 pendingRequest.input_buffer = NULL;
5504 pInputBuffer = NULL;
5505 }
5506
5507 pendingRequest.pipeline_depth = 0;
5508 pendingRequest.partial_result_cnt = 0;
5509 extractJpegMetadata(mCurJpegMeta, request);
5510 pendingRequest.jpegMetadata = mCurJpegMeta;
5511 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5512 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005513 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005514 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5515 mHybridAeEnable =
5516 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5517 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005518
5519 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5520 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005521 /* DevCamDebug metadata processCaptureRequest */
5522 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5523 mDevCamDebugMetaEnable =
5524 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5525 }
5526 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5527 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005528
5529 //extract CAC info
5530 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5531 mCacMode =
5532 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5533 }
5534 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005535 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005536
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005537 // extract enableZsl info
5538 if (gExposeEnableZslKey) {
5539 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5540 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5541 mZslEnabled = pendingRequest.enableZsl;
5542 } else {
5543 pendingRequest.enableZsl = mZslEnabled;
5544 }
5545 }
5546
Thierry Strudel3d639192016-09-09 11:52:26 -07005547 PendingBuffersInRequest bufsForCurRequest;
5548 bufsForCurRequest.frame_number = frameNumber;
5549 // Mark current timestamp for the new request
5550 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005551 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005552
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005553 if (hdrPlusRequest) {
5554 // Save settings for this request.
5555 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5556 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5557
5558 // Add to pending HDR+ request queue.
5559 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5560 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5561
5562 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5563 }
5564
Thierry Strudel3d639192016-09-09 11:52:26 -07005565 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005566 if ((request->output_buffers[i].stream->data_space ==
5567 HAL_DATASPACE_DEPTH) &&
5568 (HAL_PIXEL_FORMAT_BLOB ==
5569 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005570 continue;
5571 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005572 RequestedBufferInfo requestedBuf;
5573 memset(&requestedBuf, 0, sizeof(requestedBuf));
5574 requestedBuf.stream = request->output_buffers[i].stream;
5575 requestedBuf.buffer = NULL;
5576 pendingRequest.buffers.push_back(requestedBuf);
5577
5578 // Add the buffer handle to the pending buffers list
5579 PendingBufferInfo bufferInfo;
5580 bufferInfo.buffer = request->output_buffers[i].buffer;
5581 bufferInfo.stream = request->output_buffers[i].stream;
5582 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5583 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5584 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5585 frameNumber, bufferInfo.buffer,
5586 channel->getStreamTypeMask(), bufferInfo.stream->format);
5587 }
5588 // Add this request packet into mPendingBuffersMap
5589 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5590 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5591 mPendingBuffersMap.get_num_overall_buffers());
5592
5593 latestRequest = mPendingRequestsList.insert(
5594 mPendingRequestsList.end(), pendingRequest);
5595 if(mFlush) {
5596 LOGI("mFlush is true");
5597 pthread_mutex_unlock(&mMutex);
5598 return NO_ERROR;
5599 }
5600
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005601 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5602 // channel.
5603 if (!hdrPlusRequest) {
5604 int indexUsed;
5605 // Notify the metadata channel that we received a request
5606 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005607
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 if(request->input_buffer != NULL){
5609 LOGD("Input request, frame_number %d", frameNumber);
5610 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5611 if (NO_ERROR != rc) {
5612 LOGE("fail to set reproc parameters");
5613 pthread_mutex_unlock(&mMutex);
5614 return rc;
5615 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005616 }
5617
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005618 // Call request on other streams
5619 uint32_t streams_need_metadata = 0;
5620 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5621 for (size_t i = 0; i < request->num_output_buffers; i++) {
5622 const camera3_stream_buffer_t& output = request->output_buffers[i];
5623 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5624
5625 if (channel == NULL) {
5626 LOGW("invalid channel pointer for stream");
5627 continue;
5628 }
5629
5630 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5631 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5632 output.buffer, request->input_buffer, frameNumber);
5633 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005634 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005635 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5636 if (rc < 0) {
5637 LOGE("Fail to request on picture channel");
5638 pthread_mutex_unlock(&mMutex);
5639 return rc;
5640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005641 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005642 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5643 assert(NULL != mDepthChannel);
5644 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005645
Emilian Peev7650c122017-01-19 08:24:33 -08005646 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5647 if (rc < 0) {
5648 LOGE("Fail to map on depth buffer");
5649 pthread_mutex_unlock(&mMutex);
5650 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005651 }
Emilian Peev7650c122017-01-19 08:24:33 -08005652 } else {
5653 LOGD("snapshot request with buffer %p, frame_number %d",
5654 output.buffer, frameNumber);
5655 if (!request->settings) {
5656 rc = channel->request(output.buffer, frameNumber,
5657 NULL, mPrevParameters, indexUsed);
5658 } else {
5659 rc = channel->request(output.buffer, frameNumber,
5660 NULL, mParameters, indexUsed);
5661 }
5662 if (rc < 0) {
5663 LOGE("Fail to request on picture channel");
5664 pthread_mutex_unlock(&mMutex);
5665 return rc;
5666 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005667
Emilian Peev7650c122017-01-19 08:24:33 -08005668 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5669 uint32_t j = 0;
5670 for (j = 0; j < streamsArray.num_streams; j++) {
5671 if (streamsArray.stream_request[j].streamID == streamId) {
5672 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5673 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5674 else
5675 streamsArray.stream_request[j].buf_index = indexUsed;
5676 break;
5677 }
5678 }
5679 if (j == streamsArray.num_streams) {
5680 LOGE("Did not find matching stream to update index");
5681 assert(0);
5682 }
5683
5684 pendingBufferIter->need_metadata = true;
5685 streams_need_metadata++;
5686 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005687 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005688 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5689 bool needMetadata = false;
5690 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5691 rc = yuvChannel->request(output.buffer, frameNumber,
5692 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5693 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005694 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005695 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005696 pthread_mutex_unlock(&mMutex);
5697 return rc;
5698 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005699
5700 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5701 uint32_t j = 0;
5702 for (j = 0; j < streamsArray.num_streams; j++) {
5703 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005704 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5705 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5706 else
5707 streamsArray.stream_request[j].buf_index = indexUsed;
5708 break;
5709 }
5710 }
5711 if (j == streamsArray.num_streams) {
5712 LOGE("Did not find matching stream to update index");
5713 assert(0);
5714 }
5715
5716 pendingBufferIter->need_metadata = needMetadata;
5717 if (needMetadata)
5718 streams_need_metadata += 1;
5719 LOGD("calling YUV channel request, need_metadata is %d",
5720 needMetadata);
5721 } else {
5722 LOGD("request with buffer %p, frame_number %d",
5723 output.buffer, frameNumber);
5724
5725 rc = channel->request(output.buffer, frameNumber, indexUsed);
5726
5727 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5728 uint32_t j = 0;
5729 for (j = 0; j < streamsArray.num_streams; j++) {
5730 if (streamsArray.stream_request[j].streamID == streamId) {
5731 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5732 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5733 else
5734 streamsArray.stream_request[j].buf_index = indexUsed;
5735 break;
5736 }
5737 }
5738 if (j == streamsArray.num_streams) {
5739 LOGE("Did not find matching stream to update index");
5740 assert(0);
5741 }
5742
5743 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5744 && mBatchSize) {
5745 mToBeQueuedVidBufs++;
5746 if (mToBeQueuedVidBufs == mBatchSize) {
5747 channel->queueBatchBuf();
5748 }
5749 }
5750 if (rc < 0) {
5751 LOGE("request failed");
5752 pthread_mutex_unlock(&mMutex);
5753 return rc;
5754 }
5755 }
5756 pendingBufferIter++;
5757 }
5758
5759 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5760 itr++) {
5761 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5762
5763 if (channel == NULL) {
5764 LOGE("invalid channel pointer for stream");
5765 assert(0);
5766 return BAD_VALUE;
5767 }
5768
5769 InternalRequest requestedStream;
5770 requestedStream = (*itr);
5771
5772
5773 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5774 LOGD("snapshot request internally input buffer %p, frame_number %d",
5775 request->input_buffer, frameNumber);
5776 if(request->input_buffer != NULL){
5777 rc = channel->request(NULL, frameNumber,
5778 pInputBuffer, &mReprocMeta, indexUsed, true,
5779 requestedStream.meteringOnly);
5780 if (rc < 0) {
5781 LOGE("Fail to request on picture channel");
5782 pthread_mutex_unlock(&mMutex);
5783 return rc;
5784 }
5785 } else {
5786 LOGD("snapshot request with frame_number %d", frameNumber);
5787 if (!request->settings) {
5788 rc = channel->request(NULL, frameNumber,
5789 NULL, mPrevParameters, indexUsed, true,
5790 requestedStream.meteringOnly);
5791 } else {
5792 rc = channel->request(NULL, frameNumber,
5793 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5794 }
5795 if (rc < 0) {
5796 LOGE("Fail to request on picture channel");
5797 pthread_mutex_unlock(&mMutex);
5798 return rc;
5799 }
5800
5801 if ((*itr).meteringOnly != 1) {
5802 requestedStream.need_metadata = 1;
5803 streams_need_metadata++;
5804 }
5805 }
5806
5807 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5808 uint32_t j = 0;
5809 for (j = 0; j < streamsArray.num_streams; j++) {
5810 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005811 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5812 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5813 else
5814 streamsArray.stream_request[j].buf_index = indexUsed;
5815 break;
5816 }
5817 }
5818 if (j == streamsArray.num_streams) {
5819 LOGE("Did not find matching stream to update index");
5820 assert(0);
5821 }
5822
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005823 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005824 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005825 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005826 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005827 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005828 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005829 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005830
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005831 //If 2 streams have need_metadata set to true, fail the request, unless
5832 //we copy/reference count the metadata buffer
5833 if (streams_need_metadata > 1) {
5834 LOGE("not supporting request in which two streams requires"
5835 " 2 HAL metadata for reprocessing");
5836 pthread_mutex_unlock(&mMutex);
5837 return -EINVAL;
5838 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005839
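// PDAF data is requested from the backend only when this capture request
// includes a depth blob stream.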
Emilian Peev7650c122017-01-19 08:24:33 -08005840 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5842 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5843 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5844 pthread_mutex_unlock(&mMutex);
5845 return BAD_VALUE;
5846 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005847 if (request->input_buffer == NULL) {
5848 /* Set the parameters to backend:
5849 * - For every request in NORMAL MODE
5850 * - For every request in HFR mode during preview only case
5851 * - Once every batch in HFR mode during video recording
5852 */
5853 if (!mBatchSize ||
5854 (mBatchSize && !isVidBufRequested) ||
5855 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5856 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5857 mBatchSize, isVidBufRequested,
5858 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005859
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005860 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
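// Fold this request's stream list into the accumulated batch list, skipping
// streams that are already present, so the single set_parms issued for the
// batch covers every stream used across it.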
5861 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5862 uint32_t m = 0;
5863 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5864 if (streamsArray.stream_request[k].streamID ==
5865 mBatchedStreamsArray.stream_request[m].streamID)
5866 break;
5867 }
5868 if (m == mBatchedStreamsArray.num_streams) {
5869 mBatchedStreamsArray.stream_request\
5870 [mBatchedStreamsArray.num_streams].streamID =
5871 streamsArray.stream_request[k].streamID;
5872 mBatchedStreamsArray.stream_request\
5873 [mBatchedStreamsArray.num_streams].buf_index =
5874 streamsArray.stream_request[k].buf_index;
5875 mBatchedStreamsArray.num_streams =
5876 mBatchedStreamsArray.num_streams + 1;
5877 }
5878 }
5879 streamsArray = mBatchedStreamsArray;
5880 }
5881 /* Update stream id of all the requested buffers */
5882 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5883 streamsArray)) {
5884 LOGE("Failed to set stream type mask in the parameters");
5885 return BAD_VALUE;
5886 }
5887
5888 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5889 mParameters);
5890 if (rc < 0) {
5891 LOGE("set_parms failed");
5892 }
5893 /* reset to zero because the batch is queued */
5894 mToBeQueuedVidBufs = 0;
5895 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5896 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5897 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
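// Batch not yet full: only accumulate this request's stream IDs into
// mBatchedStreamsArray; the combined list is sent to the backend once the
// batch completes.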
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005898 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5899 uint32_t m = 0;
5900 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5901 if (streamsArray.stream_request[k].streamID ==
5902 mBatchedStreamsArray.stream_request[m].streamID)
5903 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005904 }
5905 if (m == mBatchedStreamsArray.num_streams) {
5906 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5907 streamID = streamsArray.stream_request[k].streamID;
5908 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5909 buf_index = streamsArray.stream_request[k].buf_index;
5910 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5911 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005912 }
5913 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005914 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005915 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005916 }
5917
5918 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5919
5920 mState = STARTED;
5921 // Added a timed condition wait
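// Back-pressure: block the caller until the number of in-flight requests
// drops below mMinInFlightRequests (input-buffer requests are not throttled).
// The wait is timed when the monotonic clock read below succeeds, and timing
// out is treated as a device error.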
5922 struct timespec ts;
5923 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005924 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005925 if (rc < 0) {
5926 isValidTimeout = 0;
5927 LOGE("Error reading the real time clock!!");
5928 }
5929 else {
5930 // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005931 int64_t timeout = 5;
5932 {
5933 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5934 // If there is a pending HDR+ request, the following requests may be blocked until the
5935 // HDR+ request is done. So allow a longer timeout.
5936 if (mHdrPlusPendingRequests.size() > 0) {
5937 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5938 }
5939 }
5940 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005941 }
5942 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005943 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005944 (mState != ERROR) && (mState != DEINIT)) {
5945 if (!isValidTimeout) {
5946 LOGD("Blocking on conditional wait");
5947 pthread_cond_wait(&mRequestCond, &mMutex);
5948 }
5949 else {
5950 LOGD("Blocking on timed conditional wait");
5951 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5952 if (rc == ETIMEDOUT) {
5953 rc = -ENODEV;
5954 LOGE("Unblocked on timeout!!!!");
5955 break;
5956 }
5957 }
5958 LOGD("Unblocked");
5959 if (mWokenUpByDaemon) {
5960 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005961 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005962 break;
5963 }
5964 }
5965 pthread_mutex_unlock(&mMutex);
5966
5967 return rc;
5968}
5969
5970/*===========================================================================
5971 * FUNCTION : dump
5972 *
5973 * DESCRIPTION: Dumps pending requests, pending buffers and the pending frame drop list to the given file descriptor
5974 *
5975 * PARAMETERS :
5976 * @fd : file descriptor to write the dump information to
5977 *
5978 * RETURN :
5979 *==========================================================================*/
5980void QCamera3HardwareInterface::dump(int fd)
5981{
5982 pthread_mutex_lock(&mMutex);
5983 dprintf(fd, "\n Camera HAL3 information Begin \n");
5984
5985 dprintf(fd, "\nNumber of pending requests: %zu \n",
5986 mPendingRequestsList.size());
5987 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5988 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5989 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5990 for(pendingRequestIterator i = mPendingRequestsList.begin();
5991 i != mPendingRequestsList.end(); i++) {
5992 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5993 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5994 i->input_buffer);
5995 }
5996 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5997 mPendingBuffersMap.get_num_overall_buffers());
5998 dprintf(fd, "-------+------------------\n");
5999 dprintf(fd, " Frame | Stream type mask \n");
6000 dprintf(fd, "-------+------------------\n");
6001 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6002 for(auto &j : req.mPendingBufferList) {
6003 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6004 dprintf(fd, " %5d | %11d \n",
6005 req.frame_number, channel->getStreamTypeMask());
6006 }
6007 }
6008 dprintf(fd, "-------+------------------\n");
6009
6010 dprintf(fd, "\nPending frame drop list: %zu\n",
6011 mPendingFrameDropList.size());
6012 dprintf(fd, "-------+-----------\n");
6013 dprintf(fd, " Frame | Stream ID \n");
6014 dprintf(fd, "-------+-----------\n");
6015 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6016 i != mPendingFrameDropList.end(); i++) {
6017 dprintf(fd, " %5d | %9d \n",
6018 i->frame_number, i->stream_ID);
6019 }
6020 dprintf(fd, "-------+-----------\n");
6021
6022 dprintf(fd, "\n Camera HAL3 information End \n");
6023
6024 /* use dumpsys media.camera as trigger to send update debug level event */
6025 mUpdateDebugLevel = true;
6026 pthread_mutex_unlock(&mMutex);
6027 return;
6028}
6029
6030/*===========================================================================
6031 * FUNCTION : flush
6032 *
6033 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6034 * conditionally restarts channels
6035 *
6036 * PARAMETERS :
6037 * @ restartChannels: re-start all channels
6038 *
6039 *
6040 * RETURN :
6041 * 0 on success
6042 * Error code on failure
6043 *==========================================================================*/
6044int QCamera3HardwareInterface::flush(bool restartChannels)
6045{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006046 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006047 int32_t rc = NO_ERROR;
6048
6049 LOGD("Unblocking Process Capture Request");
6050 pthread_mutex_lock(&mMutex);
6051 mFlush = true;
6052 pthread_mutex_unlock(&mMutex);
6053
6054 rc = stopAllChannels();
6055 // unlink of dualcam
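// On a linked (dual camera) device, send a CAM_DUAL_CAMERA_BUNDLE_INFO command
// with sync control OFF to break the related-sensor link; a failure here is
// logged but does not abort the flush.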
6056 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006057 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6058 &m_pDualCamCmdPtr->bundle_info;
6059 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006060 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6061 pthread_mutex_lock(&gCamLock);
6062
6063 if (mIsMainCamera == 1) {
6064 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6065 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006066 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006067 // related session id should be session id of linked session
6068 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6069 } else {
6070 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6071 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006072 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006073 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6074 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006075 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006076 pthread_mutex_unlock(&gCamLock);
6077
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006078 rc = mCameraHandle->ops->set_dual_cam_cmd(
6079 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006080 if (rc < 0) {
6081 LOGE("Dualcam: Unlink failed, but still proceed to close");
6082 }
6083 }
6084
6085 if (rc < 0) {
6086 LOGE("stopAllChannels failed");
6087 return rc;
6088 }
6089 if (mChannelHandle) {
6090 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6091 mChannelHandle);
6092 }
6093
6094 // Reset bundle info
6095 rc = setBundleInfo();
6096 if (rc < 0) {
6097 LOGE("setBundleInfo failed %d", rc);
6098 return rc;
6099 }
6100
6101 // Mutex Lock
6102 pthread_mutex_lock(&mMutex);
6103
6104 // Unblock process_capture_request
6105 mPendingLiveRequest = 0;
6106 pthread_cond_signal(&mRequestCond);
6107
6108 rc = notifyErrorForPendingRequests();
6109 if (rc < 0) {
6110 LOGE("notifyErrorForPendingRequests failed");
6111 pthread_mutex_unlock(&mMutex);
6112 return rc;
6113 }
6114
6115 mFlush = false;
6116
6117 // Start the Streams/Channels
6118 if (restartChannels) {
6119 rc = startAllChannels();
6120 if (rc < 0) {
6121 LOGE("startAllChannels failed");
6122 pthread_mutex_unlock(&mMutex);
6123 return rc;
6124 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006125 if (mChannelHandle) {
6126 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6127 mChannelHandle);
6128 if (rc < 0) {
6129 LOGE("start_channel failed");
6130 pthread_mutex_unlock(&mMutex);
6131 return rc;
6132 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006133 }
6134 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006135 pthread_mutex_unlock(&mMutex);
6136
6137 return 0;
6138}
6139
6140/*===========================================================================
6141 * FUNCTION : flushPerf
6142 *
6143 * DESCRIPTION: This is the performance optimization version of flush that does
6144 * not use stream off; rather, it flushes the system
6145 *
6146 * PARAMETERS :
6147 *
6148 *
6149 * RETURN : 0 : success
6150 * -EINVAL: input is malformed (device is not valid)
6151 * -ENODEV: if the device has encountered a serious error
6152 *==========================================================================*/
6153int QCamera3HardwareInterface::flushPerf()
6154{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006155 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006156 int32_t rc = 0;
6157 struct timespec timeout;
6158 bool timed_wait = false;
6159
6160 pthread_mutex_lock(&mMutex);
6161 mFlushPerf = true;
6162 mPendingBuffersMap.numPendingBufsAtFlush =
6163 mPendingBuffersMap.get_num_overall_buffers();
6164 LOGD("Calling flush. Wait for %d buffers to return",
6165 mPendingBuffersMap.numPendingBufsAtFlush);
6166
6167 /* send the flush event to the backend */
6168 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6169 if (rc < 0) {
6170 LOGE("Error in flush: IOCTL failure");
6171 mFlushPerf = false;
6172 pthread_mutex_unlock(&mMutex);
6173 return -ENODEV;
6174 }
6175
6176 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6177 LOGD("No pending buffers in HAL, return flush");
6178 mFlushPerf = false;
6179 pthread_mutex_unlock(&mMutex);
6180 return rc;
6181 }
6182
6183 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006184 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006185 if (rc < 0) {
6186 LOGE("Error reading the real time clock, cannot use timed wait");
6187 } else {
6188 timeout.tv_sec += FLUSH_TIMEOUT;
6189 timed_wait = true;
6190 }
6191
6192 //Block on conditional variable
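// Wait until every buffer that was pending when the flush was issued has been
// returned, using the timed wait (FLUSH_TIMEOUT) when the clock read above
// succeeded.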
6193 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6194 LOGD("Waiting on mBuffersCond");
6195 if (!timed_wait) {
6196 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6197 if (rc != 0) {
6198 LOGE("pthread_cond_wait failed due to rc = %s",
6199 strerror(rc));
6200 break;
6201 }
6202 } else {
6203 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6204 if (rc != 0) {
6205 LOGE("pthread_cond_timedwait failed due to rc = %s",
6206 strerror(rc));
6207 break;
6208 }
6209 }
6210 }
6211 if (rc != 0) {
6212 mFlushPerf = false;
6213 pthread_mutex_unlock(&mMutex);
6214 return -ENODEV;
6215 }
6216
6217 LOGD("Received buffers, now safe to return them");
6218
6219 //make sure the channels handle flush
6220 //currently only required for the picture channel to release snapshot resources
6221 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6222 it != mStreamInfo.end(); it++) {
6223 QCamera3Channel *channel = (*it)->channel;
6224 if (channel) {
6225 rc = channel->flush();
6226 if (rc) {
6227 LOGE("Flushing the channels failed with error %d", rc);
6228 // even though the channel flush failed we need to continue and
6229 // return the buffers we have to the framework, however the return
6230 // value will be an error
6231 rc = -ENODEV;
6232 }
6233 }
6234 }
6235
6236 /* notify the frameworks and send errored results */
6237 rc = notifyErrorForPendingRequests();
6238 if (rc < 0) {
6239 LOGE("notifyErrorForPendingRequests failed");
6240 pthread_mutex_unlock(&mMutex);
6241 return rc;
6242 }
6243
6244 //unblock process_capture_request
6245 mPendingLiveRequest = 0;
6246 unblockRequestIfNecessary();
6247
6248 mFlushPerf = false;
6249 pthread_mutex_unlock(&mMutex);
6250 LOGD ("Flush Operation complete. rc = %d", rc);
6251 return rc;
6252}
6253
6254/*===========================================================================
6255 * FUNCTION : handleCameraDeviceError
6256 *
6257 * DESCRIPTION: This function calls internal flush and notifies the error to
6258 * framework and updates the state variable.
6259 *
6260 * PARAMETERS : None
6261 *
6262 * RETURN : NO_ERROR on Success
6263 * Error code on failure
6264 *==========================================================================*/
6265int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6266{
6267 int32_t rc = NO_ERROR;
6268
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006269 {
6270 Mutex::Autolock lock(mFlushLock);
6271 pthread_mutex_lock(&mMutex);
6272 if (mState != ERROR) {
6273 //if mState != ERROR, nothing to be done
6274 pthread_mutex_unlock(&mMutex);
6275 return NO_ERROR;
6276 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006277 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006278
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006279 rc = flush(false /* restart channels */);
6280 if (NO_ERROR != rc) {
6281 LOGE("internal flush to handle mState = ERROR failed");
6282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006283
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006284 pthread_mutex_lock(&mMutex);
6285 mState = DEINIT;
6286 pthread_mutex_unlock(&mMutex);
6287 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006288
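// After the internal flush, report a fatal CAMERA3_MSG_ERROR_DEVICE to the
// framework so it can close the device.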
6289 camera3_notify_msg_t notify_msg;
6290 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6291 notify_msg.type = CAMERA3_MSG_ERROR;
6292 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6293 notify_msg.message.error.error_stream = NULL;
6294 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006295 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006296
6297 return rc;
6298}
6299
6300/*===========================================================================
6301 * FUNCTION : captureResultCb
6302 *
6303 * DESCRIPTION: Callback handler for all capture result
6304 * (streams, as well as metadata)
6305 *
6306 * PARAMETERS :
6307 * @metadata : metadata information
6308 * @buffer : actual gralloc buffer to be returned to frameworks.
6309 * NULL if metadata.
6310 *
6311 * RETURN : NONE
6312 *==========================================================================*/
6313void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6314 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6315{
6316 if (metadata_buf) {
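// Metadata results: batched (HFR) metadata is expanded per frame by
// handleBatchMetadata(); otherwise the buffer is handled directly under
// mMutex by handleMetadataWithLock().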
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006317 pthread_mutex_lock(&mMutex);
6318 uint8_t batchSize = mBatchSize;
6319 pthread_mutex_unlock(&mMutex);
6320 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006321 handleBatchMetadata(metadata_buf,
6322 true /* free_and_bufdone_meta_buf */);
6323 } else { /* mBatchSize = 0 */
6324 hdrPlusPerfLock(metadata_buf);
6325 pthread_mutex_lock(&mMutex);
6326 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006327 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006328 true /* last urgent frame of batch metadata */,
6329 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006330 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006331 pthread_mutex_unlock(&mMutex);
6332 }
6333 } else if (isInputBuffer) {
6334 pthread_mutex_lock(&mMutex);
6335 handleInputBufferWithLock(frame_number);
6336 pthread_mutex_unlock(&mMutex);
6337 } else {
6338 pthread_mutex_lock(&mMutex);
6339 handleBufferWithLock(buffer, frame_number);
6340 pthread_mutex_unlock(&mMutex);
6341 }
6342 return;
6343}
6344
6345/*===========================================================================
6346 * FUNCTION : getReprocessibleOutputStreamId
6347 *
6348 * DESCRIPTION: Get source output stream id for the input reprocess stream
6349 * based on size and format, which would be the largest
6350 * output stream if an input stream exists.
6351 *
6352 * PARAMETERS :
6353 * @id : return the stream id if found
6354 *
6355 * RETURN : int32_t type of status
6356 * NO_ERROR -- success
6357 * non-zero failure code
6358 *==========================================================================*/
6359int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6360{
6361 /* check if there is any output or bidirectional stream with the same size and
6362 format, and return that stream */
6363 if ((mInputStreamInfo.dim.width > 0) &&
6364 (mInputStreamInfo.dim.height > 0)) {
6365 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6366 it != mStreamInfo.end(); it++) {
6367
6368 camera3_stream_t *stream = (*it)->stream;
6369 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6370 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6371 (stream->format == mInputStreamInfo.format)) {
6372 // Usage flag for an input stream and the source output stream
6373 // may be different.
6374 LOGD("Found reprocessible output stream! %p", *it);
6375 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6376 stream->usage, mInputStreamInfo.usage);
6377
6378 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6379 if (channel != NULL && channel->mStreams[0]) {
6380 id = channel->mStreams[0]->getMyServerID();
6381 return NO_ERROR;
6382 }
6383 }
6384 }
6385 } else {
6386 LOGD("No input stream, so no reprocessible output stream");
6387 }
6388 return NAME_NOT_FOUND;
6389}
6390
6391/*===========================================================================
6392 * FUNCTION : lookupFwkName
6393 *
6394 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6395 * make sure the parameter is correctly propagated
6396 *
6397 * PARAMETERS :
6398 * @arr : map between the two enums
6399 * @len : len of the map
6400 * @hal_name : name of the hal_parm to map
6401 *
6402 * RETURN : int type of status
6403 * fwk_name -- success
6404 * non-zero failure code
6405 *==========================================================================*/
6406template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6407 size_t len, halType hal_name)
6408{
6409
6410 for (size_t i = 0; i < len; i++) {
6411 if (arr[i].hal_name == hal_name) {
6412 return arr[i].fwk_name;
6413 }
6414 }
6415
6416 /* Not being able to find a matching framework type is not necessarily
6417 * an error case. This happens when mm-camera supports more attributes
6418 * than the frameworks do */
6419 LOGH("Cannot find matching framework type");
6420 return NAME_NOT_FOUND;
6421}
6422
6423/*===========================================================================
6424 * FUNCTION : lookupHalName
6425 *
6426 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6427 * make sure the parameter is correctly propagated
6428 *
6429 * PARAMETERS :
6430 * @arr : map between the two enums
6431 * @len : len of the map
6432 * @fwk_name : name of the framework parameter to map
6433 *
6434 * RETURN : int32_t type of status
6435 * hal_name -- success
6436 * non-zero failure code
6437 *==========================================================================*/
6438template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6439 size_t len, fwkType fwk_name)
6440{
6441 for (size_t i = 0; i < len; i++) {
6442 if (arr[i].fwk_name == fwk_name) {
6443 return arr[i].hal_name;
6444 }
6445 }
6446
6447 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6448 return NAME_NOT_FOUND;
6449}
6450
6451/*===========================================================================
6452 * FUNCTION : lookupProp
6453 *
6454 * DESCRIPTION: lookup a value by its name
6455 *
6456 * PARAMETERS :
6457 * @arr : map between the two enums
6458 * @len : size of the map
6459 * @name : name to be looked up
6460 *
6461 * RETURN : Value if found
6462 * CAM_CDS_MODE_MAX if not found
6463 *==========================================================================*/
6464template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6465 size_t len, const char *name)
6466{
6467 if (name) {
6468 for (size_t i = 0; i < len; i++) {
6469 if (!strcmp(arr[i].desc, name)) {
6470 return arr[i].val;
6471 }
6472 }
6473 }
6474 return CAM_CDS_MODE_MAX;
6475}
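// A minimal usage sketch for lookupProp() (illustrative only: the entry type
// and table below are hypothetical, not names defined in this file):
//
//     struct PropMapEntry { const char *desc; cam_cds_mode_type_t val; };
//     static const PropMapEntry CDS_PROP_MAP[] = {
//         { "on",  CAM_CDS_MODE_ON  },
//         { "off", CAM_CDS_MODE_OFF },
//     };
//     cam_cds_mode_type_t mode = lookupProp(CDS_PROP_MAP,
//             sizeof(CDS_PROP_MAP) / sizeof(CDS_PROP_MAP[0]), "off");
//     // mode == CAM_CDS_MODE_OFF; an unknown or NULL name returns
//     // CAM_CDS_MODE_MAX.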
6476
6477/*===========================================================================
6478 * FUNCTION : translateFromHalMetadata
 *
6479 * DESCRIPTION: Translates metadata received from the HAL backend into a
 *              camera_metadata_t result in the format expected by the framework
6480 *
6481 * PARAMETERS :
6482 * @metadata : metadata information from callback
6483 * @timestamp: metadata buffer timestamp
6484 * @request_id: request id
6485 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006486 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006487 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6488 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006489 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006490 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6491 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006492 *
6493 * RETURN : camera_metadata_t*
6494 * metadata in a format specified by fwk
6495 *==========================================================================*/
6496camera_metadata_t*
6497QCamera3HardwareInterface::translateFromHalMetadata(
6498 metadata_buffer_t *metadata,
6499 nsecs_t timestamp,
6500 int32_t request_id,
6501 const CameraMetadata& jpegMetadata,
6502 uint8_t pipeline_depth,
6503 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006504 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006505 /* DevCamDebug metadata translateFromHalMetadata argument */
6506 uint8_t DevCamDebug_meta_enable,
6507 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006508 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006509 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006510 bool lastMetadataInBatch,
6511 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006512{
6513 CameraMetadata camMetadata;
6514 camera_metadata_t *resultMetadata;
6515
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006516 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006517 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6518 * Timestamp is needed because it's used for shutter notify calculation.
6519 */
6520 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6521 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006522 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006523 }
6524
Thierry Strudel3d639192016-09-09 11:52:26 -07006525 if (jpegMetadata.entryCount())
6526 camMetadata.append(jpegMetadata);
6527
6528 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6529 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6530 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6531 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006532 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006533 if (mBatchSize == 0) {
6534 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6535 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6536 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006537
Samuel Ha68ba5172016-12-15 18:41:12 -08006538 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6539 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6540 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
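// Each DevCamDebug vendor tag below is populated only if the corresponding
// CAM_INTF_META_DEV_CAM_* entry is present in the HAL metadata; values are
// copied through unchanged.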
6541 // DevCamDebug metadata translateFromHalMetadata AF
6542 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6543 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6544 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6545 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6546 }
6547 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6548 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6549 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6550 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6551 }
6552 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6553 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6554 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6555 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6556 }
6557 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6558 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6559 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6560 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6561 }
6562 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6563 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6564 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6565 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6566 }
6567 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6568 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6569 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6570 *DevCamDebug_af_monitor_pdaf_target_pos;
6571 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6572 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6573 }
6574 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6575 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6576 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6577 *DevCamDebug_af_monitor_pdaf_confidence;
6578 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6579 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6580 }
6581 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6582 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6583 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6584 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6585 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6586 }
6587 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6588 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6589 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6590 *DevCamDebug_af_monitor_tof_target_pos;
6591 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6592 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6593 }
6594 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6595 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6596 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6597 *DevCamDebug_af_monitor_tof_confidence;
6598 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6599 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6600 }
6601 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6602 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6603 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6604 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6605 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6606 }
6607 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6608 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6609 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6610 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6611 &fwk_DevCamDebug_af_monitor_type_select, 1);
6612 }
6613 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6614 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6615 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6616 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6617 &fwk_DevCamDebug_af_monitor_refocus, 1);
6618 }
6619 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6620 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6621 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6622 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6623 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6624 }
6625 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6626 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6627 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6628 *DevCamDebug_af_search_pdaf_target_pos;
6629 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6630 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6631 }
6632 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6633 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6634 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6635 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6636 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6637 }
6638 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6639 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6640 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6641 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6642 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6643 }
6644 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6645 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6646 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6647 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6648 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6649 }
6650 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6651 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6652 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6653 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6654 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6655 }
6656 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6657 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6658 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6659 *DevCamDebug_af_search_tof_target_pos;
6660 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6661 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6662 }
6663 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6664 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6665 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6666 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6667 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6670 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6671 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6672 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6673 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6676 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6677 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6678 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6679 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6682 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6683 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6684 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6685 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6686 }
6687 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6688 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6689 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6690 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6691 &fwk_DevCamDebug_af_search_type_select, 1);
6692 }
6693 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6694 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6695 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6696 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6697 &fwk_DevCamDebug_af_search_next_pos, 1);
6698 }
6699 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6700 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6701 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6702 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6703 &fwk_DevCamDebug_af_search_target_pos, 1);
6704 }
6705 // DevCamDebug metadata translateFromHalMetadata AEC
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6707 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6708 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6709 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6712 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6713 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6714 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6715 }
6716 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6717 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6718 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6719 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6722 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6723 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6724 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6727 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6728 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6729 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6730 }
6731 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6732 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6733 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6734 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6737 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6738 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6739 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6740 }
6741 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6742 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6743 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6744 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6745 }
Samuel Ha34229982017-02-17 13:51:11 -08006746 // DevCamDebug metadata translateFromHalMetadata zzHDR
6747 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6748 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6749 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6750 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6751 }
6752 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6753 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006754 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006755 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6756 }
6757 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6758 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6759 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6760 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6761 }
6762 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6763 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006764 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006765 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6766 }
6767 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6768 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6769 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6770 *DevCamDebug_aec_hdr_sensitivity_ratio;
6771 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6772 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6773 }
6774 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6775 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6776 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6777 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6778 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6779 }
6780 // DevCamDebug metadata translateFromHalMetadata ADRC
6781 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6782 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6783 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6784 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6785 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6786 }
6787 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6788 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6789 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6790 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6791 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6792 }
6793 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6794 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6795 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6796 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6797 }
6798 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6799 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6800 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6801 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6802 }
6803 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6804 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6805 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6806 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6807 }
6808 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6809 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6810 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6811 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6812 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006813 // DevCamDebug metadata translateFromHalMetadata AWB
6814 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6815 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6816 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6817 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6818 }
6819 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6820 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6821 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6822 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6823 }
6824 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6825 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6826 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6827 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6828 }
6829 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6830 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6831 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6832 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6833 }
6834 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6835 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6836 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6837 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6838 }
6839 }
6840 // atrace_end(ATRACE_TAG_ALWAYS);
6841
Thierry Strudel3d639192016-09-09 11:52:26 -07006842 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6843 int64_t fwk_frame_number = *frame_number;
6844 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6845 }
6846
6847 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6848 int32_t fps_range[2];
6849 fps_range[0] = (int32_t)float_range->min_fps;
6850 fps_range[1] = (int32_t)float_range->max_fps;
6851 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6852 fps_range, 2);
6853 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6854 fps_range[0], fps_range[1]);
6855 }
6856
6857 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6858 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6859 }
6860
6861 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6862 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6863 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6864 *sceneMode);
6865 if (NAME_NOT_FOUND != val) {
6866 uint8_t fwkSceneMode = (uint8_t)val;
6867 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6868 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6869 fwkSceneMode);
6870 }
6871 }
6872
6873 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6874 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6875 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6876 }
6877
6878 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6879 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6880 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6881 }
6882
6883 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6884 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6885 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6886 }
6887
6888 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6889 CAM_INTF_META_EDGE_MODE, metadata) {
6890 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6891 }
6892
6893 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6894 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6895 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6896 }
6897
6898 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6899 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6900 }
6901
6902 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6903 if (0 <= *flashState) {
6904 uint8_t fwk_flashState = (uint8_t) *flashState;
6905 if (!gCamCapability[mCameraId]->flash_available) {
6906 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6907 }
6908 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6909 }
6910 }
6911
6912 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6913 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6914 if (NAME_NOT_FOUND != val) {
6915 uint8_t fwk_flashMode = (uint8_t)val;
6916 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6917 }
6918 }
6919
6920 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6921 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6922 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6923 }
6924
6925 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6926 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6927 }
6928
6929 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6930 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6931 }
6932
6933 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6934 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6935 }
6936
6937 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6938 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6939 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6940 }
6941
6942 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6943 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6944 LOGD("fwk_videoStab = %d", fwk_videoStab);
6945 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6946 } else {
6947 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6948 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6949 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6950 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006951 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006952 }
6953
6954 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6955 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6956 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6957 }
6958
6959 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6960 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6961 }
6962
Thierry Strudel3d639192016-09-09 11:52:26 -07006963 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6964 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006965 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006966
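// Note: adjustBlackLevelForCFA() presumably remaps the four per-channel black levels
// from the sensor's CFA arrangement into the R, Gr, Gb, B order expected by the
// QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN vendor tag (see the RGGB-order debug log
// below); the exact mapping lives in that helper.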
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006967 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6968 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006969
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006970 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006971 blackLevelAppliedPattern->cam_black_level[0],
6972 blackLevelAppliedPattern->cam_black_level[1],
6973 blackLevelAppliedPattern->cam_black_level[2],
6974 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006975 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6976 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006977
6978#ifndef USE_HAL_3_3
6979 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306980 // Convert the black level from the internal 14-bit depth to the sensor's
Zhijun Heb753c672016-06-15 14:50:48 -07006981 // 10-bit raw depth space (divide by 2^4 = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05306982 fwk_blackLevelInd[0] /= 16.0;
6983 fwk_blackLevelInd[1] /= 16.0;
6984 fwk_blackLevelInd[2] /= 16.0;
6985 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006986 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6987 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006988#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006989 }
6990
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006991#ifndef USE_HAL_3_3
6992 // Fixed whitelevel is used by ISP/Sensor
6993 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6994 &gCamCapability[mCameraId]->white_level, 1);
6995#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006996
6997 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6998 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6999 int32_t scalerCropRegion[4];
7000 scalerCropRegion[0] = hScalerCropRegion->left;
7001 scalerCropRegion[1] = hScalerCropRegion->top;
7002 scalerCropRegion[2] = hScalerCropRegion->width;
7003 scalerCropRegion[3] = hScalerCropRegion->height;
7004
7005 // Adjust crop region from sensor output coordinate system to active
7006 // array coordinate system.
7007 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7008 scalerCropRegion[2], scalerCropRegion[3]);
7009
7010 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7011 }
7012
7013 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7014 LOGD("sensorExpTime = %lld", *sensorExpTime);
7015 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7016 }
7017
7018 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7019 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7020 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7021 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7022 }
7023
7024 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7025 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7026 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7027 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7028 sensorRollingShutterSkew, 1);
7029 }
7030
7031 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7032 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7033 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7034
7035 //calculate the noise profile based on sensitivity
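// ANDROID_SENSOR_NOISE_PROFILE reports one (S, O) pair per color channel; the noise
// model is roughly variance(x) = S * x + O for a pixel value x, with both coefficients
// derived from the reported sensitivity by the helpers below.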
7036 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7037 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7038 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7039 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7040 noise_profile[i] = noise_profile_S;
7041 noise_profile[i+1] = noise_profile_O;
7042 }
7043 LOGD("noise model entry (S, O) is (%f, %f)",
7044 noise_profile_S, noise_profile_O);
7045 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7046 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7047 }
7048
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007049#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007050 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007051 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007052 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007053 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007054 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7055 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7056 }
7057 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007058#endif
7059
Thierry Strudel3d639192016-09-09 11:52:26 -07007060 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7061 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7062 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7063 }
7064
7065 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7066 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7067 *faceDetectMode);
7068 if (NAME_NOT_FOUND != val) {
7069 uint8_t fwk_faceDetectMode = (uint8_t)val;
7070 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7071
7072 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7073 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7074 CAM_INTF_META_FACE_DETECTION, metadata) {
7075 uint8_t numFaces = MIN(
7076 faceDetectionInfo->num_faces_detected, MAX_ROI);
7077 int32_t faceIds[MAX_ROI];
7078 uint8_t faceScores[MAX_ROI];
7079 int32_t faceRectangles[MAX_ROI * 4];
7080 int32_t faceLandmarks[MAX_ROI * 6];
7081 size_t j = 0, k = 0;
7082
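// Each detected face contributes four entries to faceRectangles (left, top, right,
// bottom, as indexed by FACE_LEFT..FACE_BOTTOM below) and six entries to faceLandmarks
// (left eye, right eye and mouth center coordinates).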
7083 for (size_t i = 0; i < numFaces; i++) {
7084 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7085 // Map the face boundary from the sensor output coordinate system to the
7086 // active array coordinate system.
7087 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7088 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7089 rect.width, rect.height);
7090
7091 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7092 faceRectangles+j, -1);
7093
Jason Lee8ce36fa2017-04-19 19:40:37 -07007094 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7095 "bottom-right (%d, %d)",
7096 faceDetectionInfo->frame_id, i,
7097 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7098 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7099
Thierry Strudel3d639192016-09-09 11:52:26 -07007100 j+= 4;
7101 }
7102 if (numFaces <= 0) {
7103 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7104 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7105 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7106 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7107 }
7108
7109 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7110 numFaces);
7111 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7112 faceRectangles, numFaces * 4U);
7113 if (fwk_faceDetectMode ==
7114 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7115 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7116 CAM_INTF_META_FACE_LANDMARK, metadata) {
7117
7118 for (size_t i = 0; i < numFaces; i++) {
7119 // Map the landmark coordinates from the sensor output coordinate system
7120 // to the active array coordinate system.
7121 mCropRegionMapper.toActiveArray(
7122 landmarks->face_landmarks[i].left_eye_center.x,
7123 landmarks->face_landmarks[i].left_eye_center.y);
7124 mCropRegionMapper.toActiveArray(
7125 landmarks->face_landmarks[i].right_eye_center.x,
7126 landmarks->face_landmarks[i].right_eye_center.y);
7127 mCropRegionMapper.toActiveArray(
7128 landmarks->face_landmarks[i].mouth_center.x,
7129 landmarks->face_landmarks[i].mouth_center.y);
7130
7131 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007132
7133 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7134 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7135 faceDetectionInfo->frame_id, i,
7136 faceLandmarks[k + LEFT_EYE_X],
7137 faceLandmarks[k + LEFT_EYE_Y],
7138 faceLandmarks[k + RIGHT_EYE_X],
7139 faceLandmarks[k + RIGHT_EYE_Y],
7140 faceLandmarks[k + MOUTH_X],
7141 faceLandmarks[k + MOUTH_Y]);
7142
Thierry Strudel04e026f2016-10-10 11:27:36 -07007143 k+= TOTAL_LANDMARK_INDICES;
7144 }
7145 } else {
7146 for (size_t i = 0; i < numFaces; i++) {
7147 setInvalidLandmarks(faceLandmarks+k);
7148 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007149 }
7150 }
7151
Jason Lee49619db2017-04-13 12:07:22 -07007152 for (size_t i = 0; i < numFaces; i++) {
7153 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7154
7155 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7156 faceDetectionInfo->frame_id, i, faceIds[i]);
7157 }
7158
Thierry Strudel3d639192016-09-09 11:52:26 -07007159 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7160 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7161 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007162 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007163 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7164 CAM_INTF_META_FACE_BLINK, metadata) {
7165 uint8_t detected[MAX_ROI];
7166 uint8_t degree[MAX_ROI * 2];
7167 for (size_t i = 0; i < numFaces; i++) {
7168 detected[i] = blinks->blink[i].blink_detected;
7169 degree[2 * i] = blinks->blink[i].left_blink;
7170 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007171
Jason Lee49619db2017-04-13 12:07:22 -07007172 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7173 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7174 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7175 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007176 }
7177 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7178 detected, numFaces);
7179 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7180 degree, numFaces * 2);
7181 }
7182 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7183 CAM_INTF_META_FACE_SMILE, metadata) {
7184 uint8_t degree[MAX_ROI];
7185 uint8_t confidence[MAX_ROI];
7186 for (size_t i = 0; i < numFaces; i++) {
7187 degree[i] = smiles->smile[i].smile_degree;
7188 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007189
Jason Lee49619db2017-04-13 12:07:22 -07007190 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7191 "smile_degree=%d, smile_score=%d",
7192 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007193 }
7194 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7195 degree, numFaces);
7196 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7197 confidence, numFaces);
7198 }
7199 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7200 CAM_INTF_META_FACE_GAZE, metadata) {
7201 int8_t angle[MAX_ROI];
7202 int32_t direction[MAX_ROI * 3];
7203 int8_t degree[MAX_ROI * 2];
7204 for (size_t i = 0; i < numFaces; i++) {
7205 angle[i] = gazes->gaze[i].gaze_angle;
7206 direction[3 * i] = gazes->gaze[i].updown_dir;
7207 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7208 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7209 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7210 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007211
7212 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7213 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7214 "left_right_gaze=%d, top_bottom_gaze=%d",
7215 faceDetectionInfo->frame_id, i, angle[i],
7216 direction[3 * i], direction[3 * i + 1],
7217 direction[3 * i + 2],
7218 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007219 }
7220 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7221 (uint8_t *)angle, numFaces);
7222 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7223 direction, numFaces * 3);
7224 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7225 (uint8_t *)degree, numFaces * 2);
7226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007227 }
7228 }
7229 }
7230 }
7231
7232 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7233 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007234 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007235 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007236 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007237
Shuzhen Wang14415f52016-11-16 18:26:18 -08007238 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7239 histogramBins = *histBins;
7240 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7241 }
7242
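// When the histogram is enabled, pick one per-channel histogram buffer from the bayer
// (or YUV) stats and publish it as histogramBins int32 entries under the
// NEXUS_EXPERIMENTAL_2017_HISTOGRAM tag; only a single channel is forwarded.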
7243 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7245 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007246 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007247
7248 switch (stats_data->type) {
7249 case CAM_HISTOGRAM_TYPE_BAYER:
7250 switch (stats_data->bayer_stats.data_type) {
7251 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007252 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7253 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007254 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007255 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7256 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007257 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007258 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7259 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007261 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007262 case CAM_STATS_CHANNEL_R:
7263 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007264 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7265 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007266 }
7267 break;
7268 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007269 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007270 break;
7271 }
7272
Shuzhen Wang14415f52016-11-16 18:26:18 -08007273 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007274 }
7275 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007276 }
7277
7278 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7279 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7280 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7281 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7282 }
7283
7284 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7285 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7286 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7287 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7288 }
7289
7290 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7291 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7292 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7293 CAM_MAX_SHADING_MAP_HEIGHT);
7294 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7295 CAM_MAX_SHADING_MAP_WIDTH);
7296 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7297 lensShadingMap->lens_shading, 4U * map_width * map_height);
7298 }
7299
7300 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7301 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7302 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7303 }
7304
7305 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7306 // Translate CAM_INTF_META_TONEMAP_CURVES into the framework tonemap curves
7307 /* ch0 = G, ch 1 = B, ch 2 = R*/
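// Each tonemap point is an (input, output) pair, so the count passed to update() is
// tonemap_points_cnt * 2; the channel order of curves[] follows the comment above
// (G, B, R).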
7308 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7309 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7310 tonemap->tonemap_points_cnt,
7311 CAM_MAX_TONEMAP_CURVE_SIZE);
7312 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7313 }
7314
7315 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7316 &tonemap->curves[0].tonemap_points[0][0],
7317 tonemap->tonemap_points_cnt * 2);
7318
7319 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7320 &tonemap->curves[1].tonemap_points[0][0],
7321 tonemap->tonemap_points_cnt * 2);
7322
7323 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7324 &tonemap->curves[2].tonemap_points[0][0],
7325 tonemap->tonemap_points_cnt * 2);
7326 }
7327
7328 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7329 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7330 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7331 CC_GAIN_MAX);
7332 }
7333
7334 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7335 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7336 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7337 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7338 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7339 }
7340
7341 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7342 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7343 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7344 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7345 toneCurve->tonemap_points_cnt,
7346 CAM_MAX_TONEMAP_CURVE_SIZE);
7347 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7348 }
7349 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7350 (float*)toneCurve->curve.tonemap_points,
7351 toneCurve->tonemap_points_cnt * 2);
7352 }
7353
7354 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7355 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7356 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7357 predColorCorrectionGains->gains, 4);
7358 }
7359
7360 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7361 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7362 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7363 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7364 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7365 }
7366
7367 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7368 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7369 }
7370
7371 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7372 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7373 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7374 }
7375
7376 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7377 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7378 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7379 }
7380
7381 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7382 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7383 *effectMode);
7384 if (NAME_NOT_FOUND != val) {
7385 uint8_t fwk_effectMode = (uint8_t)val;
7386 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7387 }
7388 }
7389
7390 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7391 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7392 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7393 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7394 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7395 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7396 }
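// ANDROID_SENSOR_TEST_PATTERN_DATA is ordered [R, Gr, Gb, B]; the HAL's gr/gb values
// are swapped below for GBRG/BGGR sensors so the framework ordering holds regardless
// of the color filter arrangement.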
7397 int32_t fwk_testPatternData[4];
7398 fwk_testPatternData[0] = testPatternData->r;
7399 fwk_testPatternData[3] = testPatternData->b;
7400 switch (gCamCapability[mCameraId]->color_arrangement) {
7401 case CAM_FILTER_ARRANGEMENT_RGGB:
7402 case CAM_FILTER_ARRANGEMENT_GRBG:
7403 fwk_testPatternData[1] = testPatternData->gr;
7404 fwk_testPatternData[2] = testPatternData->gb;
7405 break;
7406 case CAM_FILTER_ARRANGEMENT_GBRG:
7407 case CAM_FILTER_ARRANGEMENT_BGGR:
7408 fwk_testPatternData[2] = testPatternData->gr;
7409 fwk_testPatternData[1] = testPatternData->gb;
7410 break;
7411 default:
7412 LOGE("color arrangement %d is not supported",
7413 gCamCapability[mCameraId]->color_arrangement);
7414 break;
7415 }
7416 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7417 }
7418
7419 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7420 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7421 }
7422
7423 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7424 String8 str((const char *)gps_methods);
7425 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7426 }
7427
7428 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7429 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7430 }
7431
7432 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7433 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7434 }
7435
7436 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7437 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7438 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7439 }
7440
7441 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7442 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7443 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7444 }
7445
7446 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7447 int32_t fwk_thumb_size[2];
7448 fwk_thumb_size[0] = thumb_size->width;
7449 fwk_thumb_size[1] = thumb_size->height;
7450 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7451 }
7452
7453 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7454 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7455 privateData,
7456 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7457 }
7458
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007459 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007460 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007461 meteringMode, 1);
7462 }
7463
Thierry Strudel54dc9782017-02-15 12:12:10 -08007464 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7465 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7466 LOGD("hdr_scene_data: %d %f\n",
7467 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7468 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7469 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7470 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7471 &isHdr, 1);
7472 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7473 &isHdrConfidence, 1);
7474 }
7475
7476
7477
Thierry Strudel3d639192016-09-09 11:52:26 -07007478 if (metadata->is_tuning_params_valid) {
7479 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7480 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7481 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7482
7483
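// Tuning blob layout (packed below): six uint32_t header fields -- the data version
// followed by the sensor/VFE/CPP/CAC/mod3 section sizes -- then the variable-length
// sensor, VFE, CPP and CAC data sections. The final update() length is expressed in
// uint32_t units.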
7484 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7485 sizeof(uint32_t));
7486 data += sizeof(uint32_t);
7487
7488 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7489 sizeof(uint32_t));
7490 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7491 data += sizeof(uint32_t);
7492
7493 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7494 sizeof(uint32_t));
7495 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7496 data += sizeof(uint32_t);
7497
7498 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7499 sizeof(uint32_t));
7500 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7501 data += sizeof(uint32_t);
7502
7503 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7504 sizeof(uint32_t));
7505 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7506 data += sizeof(uint32_t);
7507
7508 metadata->tuning_params.tuning_mod3_data_size = 0;
7509 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7510 sizeof(uint32_t));
7511 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7512 data += sizeof(uint32_t);
7513
7514 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7515 TUNING_SENSOR_DATA_MAX);
7516 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7517 count);
7518 data += count;
7519
7520 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7521 TUNING_VFE_DATA_MAX);
7522 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7523 count);
7524 data += count;
7525
7526 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7527 TUNING_CPP_DATA_MAX);
7528 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7529 count);
7530 data += count;
7531
7532 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7533 TUNING_CAC_DATA_MAX);
7534 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7535 count);
7536 data += count;
7537
7538 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7539 (int32_t *)(void *)tuning_meta_data_blob,
7540 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7541 }
7542
7543 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7544 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7545 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7546 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7547 NEUTRAL_COL_POINTS);
7548 }
7549
7550 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7551 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7552 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7553 }
7554
7555 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7556 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7557 // Adjust crop region from sensor output coordinate system to active
7558 // array coordinate system.
7559 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7560 hAeRegions->rect.width, hAeRegions->rect.height);
7561
7562 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7563 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7564 REGIONS_TUPLE_COUNT);
7565 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7566 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7567 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7568 hAeRegions->rect.height);
7569 }
7570
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007571 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7572 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7573 if (NAME_NOT_FOUND != val) {
7574 uint8_t fwkAfMode = (uint8_t)val;
7575 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7576 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7577 } else {
7578 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7579 val);
7580 }
7581 }
7582
Thierry Strudel3d639192016-09-09 11:52:26 -07007583 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7584 uint8_t fwk_afState = (uint8_t) *afState;
7585 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007586 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007587 }
7588
7589 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7590 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7591 }
7592
7593 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7594 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7595 }
7596
7597 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7598 uint8_t fwk_lensState = *lensState;
7599 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7600 }
7601
Thierry Strudel3d639192016-09-09 11:52:26 -07007602
7603 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007604 uint32_t ab_mode = *hal_ab_mode;
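// The framework antibanding enum only defines OFF/50HZ/60HZ/AUTO, so the HAL-specific
// AUTO_50HZ/AUTO_60HZ variants are collapsed to plain AUTO before the lookup.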
7605 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7606 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7607 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7608 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007609 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007610 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007611 if (NAME_NOT_FOUND != val) {
7612 uint8_t fwk_ab_mode = (uint8_t)val;
7613 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7614 }
7615 }
7616
7617 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7618 int val = lookupFwkName(SCENE_MODES_MAP,
7619 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7620 if (NAME_NOT_FOUND != val) {
7621 uint8_t fwkBestshotMode = (uint8_t)val;
7622 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7623 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7624 } else {
7625 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7626 }
7627 }
7628
7629 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7630 uint8_t fwk_mode = (uint8_t) *mode;
7631 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7632 }
7633
7634 /* Constant metadata values to be updated */
7635 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7636 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7637
7638 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7639 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7640
7641 int32_t hotPixelMap[2];
7642 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7643
7644 // CDS
7645 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7646 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7647 }
7648
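// mCurrFeatureState tracks which of the video HDR / IR / TNR features are currently
// active so that transitions can be logged (PROFILE_META_*_TOGGLED) when the HAL
// metadata reports a change; the blocks below update that mask and the vendor tags.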
Thierry Strudel04e026f2016-10-10 11:27:36 -07007649 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7650 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007651 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007652 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7653 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7654 } else {
7655 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7656 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007657
7658 if(fwk_hdr != curr_hdr_state) {
7659 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7660 if(fwk_hdr)
7661 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7662 else
7663 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7664 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007665 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7666 }
7667
Thierry Strudel54dc9782017-02-15 12:12:10 -08007668 //binning correction
7669 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7670 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7671 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7672 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7673 }
7674
Thierry Strudel04e026f2016-10-10 11:27:36 -07007675 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007676 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007677 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7678 int8_t is_ir_on = 0;
7679
7680 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7681 if(is_ir_on != curr_ir_state) {
7682 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7683 if(is_ir_on)
7684 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7685 else
7686 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7687 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007688 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007689 }
7690
Thierry Strudel269c81a2016-10-12 12:13:59 -07007691 // AEC SPEED
7692 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7693 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7694 }
7695
7696 // AWB SPEED
7697 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7698 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7699 }
7700
Thierry Strudel3d639192016-09-09 11:52:26 -07007701 // TNR
7702 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7703 uint8_t tnr_enable = tnr->denoise_enable;
7704 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007705 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7706 int8_t is_tnr_on = 0;
7707
7708 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7709 if(is_tnr_on != curr_tnr_state) {
7710 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7711 if(is_tnr_on)
7712 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7713 else
7714 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7715 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007716
7717 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7718 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7719 }
7720
7721 // Reprocess crop data
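// Only the crop info that belongs to the reprocessible output stream is forwarded, so
// the offline reprocess path can apply the same crop; if post-processing was already
// done (pprocDone), the full input dimensions are reported instead.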
7722 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7723 uint8_t cnt = crop_data->num_of_streams;
7724 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7725 // mm-qcamera-daemon only posts crop_data for streams
7726 // not linked to pproc, so the absence of valid crop metadata is not
7727 // necessarily an error case.
7728 LOGD("No valid crop metadata entries");
7729 } else {
7730 uint32_t reproc_stream_id;
7731 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7732 LOGD("No reprocessible stream found, ignore crop data");
7733 } else {
7734 int rc = NO_ERROR;
7735 Vector<int32_t> roi_map;
7736 int32_t *crop = new int32_t[cnt*4];
7737 if (NULL == crop) {
7738 rc = NO_MEMORY;
7739 }
7740 if (NO_ERROR == rc) {
7741 int32_t streams_found = 0;
7742 for (size_t i = 0; i < cnt; i++) {
7743 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7744 if (pprocDone) {
7745 // HAL already does internal reprocessing,
7746 // either via reprocessing before JPEG encoding,
7747 // or offline postprocessing for pproc bypass case.
7748 crop[0] = 0;
7749 crop[1] = 0;
7750 crop[2] = mInputStreamInfo.dim.width;
7751 crop[3] = mInputStreamInfo.dim.height;
7752 } else {
7753 crop[0] = crop_data->crop_info[i].crop.left;
7754 crop[1] = crop_data->crop_info[i].crop.top;
7755 crop[2] = crop_data->crop_info[i].crop.width;
7756 crop[3] = crop_data->crop_info[i].crop.height;
7757 }
7758 roi_map.add(crop_data->crop_info[i].roi_map.left);
7759 roi_map.add(crop_data->crop_info[i].roi_map.top);
7760 roi_map.add(crop_data->crop_info[i].roi_map.width);
7761 roi_map.add(crop_data->crop_info[i].roi_map.height);
7762 streams_found++;
7763 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7764 crop[0], crop[1], crop[2], crop[3]);
7765 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7766 crop_data->crop_info[i].roi_map.left,
7767 crop_data->crop_info[i].roi_map.top,
7768 crop_data->crop_info[i].roi_map.width,
7769 crop_data->crop_info[i].roi_map.height);
7770 break;
7771
7772 }
7773 }
7774 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7775 &streams_found, 1);
7776 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7777 crop, (size_t)(streams_found * 4));
7778 if (roi_map.array()) {
7779 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7780 roi_map.array(), roi_map.size());
7781 }
7782 }
7783 if (crop) {
7784 delete [] crop;
7785 }
7786 }
7787 }
7788 }
7789
7790 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7791 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7792 // so hardcode the CAC result to OFF mode.
7793 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7794 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7795 } else {
7796 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7797 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7798 *cacMode);
7799 if (NAME_NOT_FOUND != val) {
7800 uint8_t resultCacMode = (uint8_t)val;
7801 // check whether CAC result from CB is equal to Framework set CAC mode
7802 // If not equal then set the CAC mode came in corresponding request
7803 if (fwk_cacMode != resultCacMode) {
7804 resultCacMode = fwk_cacMode;
7805 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007806 //Check if CAC is disabled by property
7807 if (m_cacModeDisabled) {
7808 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7809 }
7810
Thierry Strudel3d639192016-09-09 11:52:26 -07007811 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7812 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7813 } else {
7814 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7815 }
7816 }
7817 }
7818
7819 // Post blob of cam_cds_data through vendor tag.
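// The override below reduces the per-stream CDS info to a single entry: only the CDS
// enable flag of the reprocessible output stream (plus the session-level flag) is
// reported to the framework.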
7820 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7821 uint8_t cnt = cdsInfo->num_of_streams;
7822 cam_cds_data_t cdsDataOverride;
7823 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7824 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7825 cdsDataOverride.num_of_streams = 1;
7826 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7827 uint32_t reproc_stream_id;
7828 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7829 LOGD("No reprocessible stream found, ignore cds data");
7830 } else {
7831 for (size_t i = 0; i < cnt; i++) {
7832 if (cdsInfo->cds_info[i].stream_id ==
7833 reproc_stream_id) {
7834 cdsDataOverride.cds_info[0].cds_enable =
7835 cdsInfo->cds_info[i].cds_enable;
7836 break;
7837 }
7838 }
7839 }
7840 } else {
7841 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7842 }
7843 camMetadata.update(QCAMERA3_CDS_INFO,
7844 (uint8_t *)&cdsDataOverride,
7845 sizeof(cam_cds_data_t));
7846 }
7847
7848 // Ldaf calibration data
7849 if (!mLdafCalibExist) {
7850 IF_META_AVAILABLE(uint32_t, ldafCalib,
7851 CAM_INTF_META_LDAF_EXIF, metadata) {
7852 mLdafCalibExist = true;
7853 mLdafCalib[0] = ldafCalib[0];
7854 mLdafCalib[1] = ldafCalib[1];
7855 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7856 ldafCalib[0], ldafCalib[1]);
7857 }
7858 }
7859
Thierry Strudel54dc9782017-02-15 12:12:10 -08007860 // EXIF debug data through vendor tag
7861 /*
7862 * Mobicat Mask can assume 3 values:
7863 * 1 refers to Mobicat data,
7864 * 2 refers to Stats Debug and Exif Debug Data
7865 * 3 refers to Mobicat and Stats Debug Data
7866 * We want to make sure that we are sending Exif debug data
7867 * only when Mobicat Mask is 2.
7868 */
7869 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7870 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7871 (uint8_t *)(void *)mExifParams.debug_params,
7872 sizeof(mm_jpeg_debug_exif_params_t));
7873 }
7874
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007875 // Reprocess and DDM debug data through vendor tag
7876 cam_reprocess_info_t repro_info;
7877 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007878 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7879 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007880 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 }
7882 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7883 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007884 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007885 }
7886 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7887 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007888 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007889 }
7890 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7891 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007892 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007893 }
7894 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7895 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007896 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007897 }
7898 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007899 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007900 }
7901 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7902 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007903 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007904 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007905 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7906 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7907 }
7908 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7909 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7910 }
7911 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7912 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007914 // INSTANT AEC MODE
7915 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7916 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7917 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7918 }
7919
Shuzhen Wange763e802016-03-31 10:24:29 -07007920 // AF scene change
7921 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7922 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7923 }
7924
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007925 // Enable ZSL
7926 if (enableZsl != nullptr) {
7927 uint8_t value = *enableZsl ?
7928 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7929 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7930 }
7931
Thierry Strudel3d639192016-09-09 11:52:26 -07007932 resultMetadata = camMetadata.release();
7933 return resultMetadata;
7934}
7935
7936/*===========================================================================
7937 * FUNCTION : saveExifParams
7938 *
7939 * DESCRIPTION: cache the EXIF debug parameters delivered in the metadata callback
7940 *
7941 * PARAMETERS :
7942 * @metadata : metadata information from callback
7943 *
7944 * RETURN : none
7945 *
7946 *==========================================================================*/
7947void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7948{
7949 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7950 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7951 if (mExifParams.debug_params) {
7952 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7953 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7954 }
7955 }
7956 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7957 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7958 if (mExifParams.debug_params) {
7959 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7960 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7961 }
7962 }
7963 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7964 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7965 if (mExifParams.debug_params) {
7966 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7967 mExifParams.debug_params->af_debug_params_valid = TRUE;
7968 }
7969 }
7970 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7971 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7972 if (mExifParams.debug_params) {
7973 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7974 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7975 }
7976 }
7977 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7978 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7979 if (mExifParams.debug_params) {
7980 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7981 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7982 }
7983 }
7984 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7985 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7986 if (mExifParams.debug_params) {
7987 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7988 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7989 }
7990 }
7991 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7992 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7993 if (mExifParams.debug_params) {
7994 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7995 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7996 }
7997 }
7998 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7999 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8000 if (mExifParams.debug_params) {
8001 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8002 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8003 }
8004 }
8005}
8006
8007/*===========================================================================
8008 * FUNCTION : get3AExifParams
8009 *
8010 * DESCRIPTION: return the cached EXIF/3A parameters (mExifParams)
8011 *
8012 * PARAMETERS : none
8013 *
8014 *
8015 * RETURN : mm_jpeg_exif_params_t
8016 *
8017 *==========================================================================*/
8018mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8019{
8020 return mExifParams;
8021}
8022
8023/*===========================================================================
8024 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8025 *
8026 * DESCRIPTION:
8027 *
8028 * PARAMETERS :
8029 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008030 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8031 * urgent metadata in a batch. Always true for
8032 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008033 *
8034 * RETURN : camera_metadata_t*
8035 * metadata in a format specified by fwk
8036 *==========================================================================*/
8037camera_metadata_t*
8038QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008039 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008040{
8041 CameraMetadata camMetadata;
8042 camera_metadata_t *resultMetadata;
8043
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008044 if (!lastUrgentMetadataInBatch) {
8045 /* In batch mode, use empty metadata if this is not the last in batch
8046 */
8047 resultMetadata = allocate_camera_metadata(0, 0);
8048 return resultMetadata;
8049 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008050
8051 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8052 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8053 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8054 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8055 }
8056
8057 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8058 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8059 &aecTrigger->trigger, 1);
8060 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8061 &aecTrigger->trigger_id, 1);
8062 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8063 aecTrigger->trigger);
8064 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8065 aecTrigger->trigger_id);
8066 }
8067
8068 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8069 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8070 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8071 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8072 }
8073
Thierry Strudel3d639192016-09-09 11:52:26 -07008074 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8075 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8076 &af_trigger->trigger, 1);
8077 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8078 af_trigger->trigger);
8079 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8080 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8081 af_trigger->trigger_id);
8082 }
8083
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008084 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8085 /*af regions*/
8086 int32_t afRegions[REGIONS_TUPLE_COUNT];
8087 // Adjust crop region from sensor output coordinate system to active
8088 // array coordinate system.
8089 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8090 hAfRegions->rect.width, hAfRegions->rect.height);
8091
8092 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8093 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8094 REGIONS_TUPLE_COUNT);
8095 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8096 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8097 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8098 hAfRegions->rect.height);
8099 }
8100
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008101 // AF region confidence
8102 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8103 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8104 }
8105
Thierry Strudel3d639192016-09-09 11:52:26 -07008106 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8107 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8108 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8109 if (NAME_NOT_FOUND != val) {
8110 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8111 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8112 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8113 } else {
8114 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8115 }
8116 }
8117
8118 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8119 uint32_t aeMode = CAM_AE_MODE_MAX;
8120 int32_t flashMode = CAM_FLASH_MODE_MAX;
8121 int32_t redeye = -1;
8122 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8123 aeMode = *pAeMode;
8124 }
8125 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8126 flashMode = *pFlashMode;
8127 }
8128 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8129 redeye = *pRedeye;
8130 }
8131
8132 if (1 == redeye) {
8133 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8134 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8135 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8136 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8137 flashMode);
8138 if (NAME_NOT_FOUND != val) {
8139 fwk_aeMode = (uint8_t)val;
8140 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8141 } else {
8142 LOGE("Unsupported flash mode %d", flashMode);
8143 }
8144 } else if (aeMode == CAM_AE_MODE_ON) {
8145 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8146 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8147 } else if (aeMode == CAM_AE_MODE_OFF) {
8148 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8149 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008150 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8151 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8152 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 } else {
8154 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8155 "flashMode:%d, aeMode:%u!!!",
8156 redeye, flashMode, aeMode);
8157 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008158 if (mInstantAEC) {
8159        // Increment frame index count until a bound is reached for instant AEC.
8160 mInstantAecFrameIdxCount++;
8161 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8162 CAM_INTF_META_AEC_INFO, metadata) {
8163 LOGH("ae_params->settled = %d",ae_params->settled);
8164 // If AEC settled, or if number of frames reached bound value,
8165 // should reset instant AEC.
8166 if (ae_params->settled ||
8167 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8168 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8169 mInstantAEC = false;
8170 mResetInstantAEC = true;
8171 mInstantAecFrameIdxCount = 0;
8172 }
8173 }
8174 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008175 resultMetadata = camMetadata.release();
8176 return resultMetadata;
8177}
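/*
 * Quick reference for the AE mode deduction above (a summary of the existing
 * branches; the example values are hypothetical, not from a real capture):
 *   redeye == 1                                  -> AE_MODE_ON_AUTO_FLASH_REDEYE
 *   else flashMode is CAM_FLASH_MODE_AUTO / _ON  -> mapped through AE_FLASH_MODE_MAP
 *   else aeMode == CAM_AE_MODE_ON                -> AE_MODE_ON
 *   else aeMode == CAM_AE_MODE_OFF               -> AE_MODE_OFF
 *   else aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH -> NEXUS_EXPERIMENTAL_2016 external flash mode
 * Example: redeye = 0, flashMode = CAM_FLASH_MODE_AUTO, aeMode = CAM_AE_MODE_ON
 * takes the flash branch, presumably yielding ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH.
 */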
8178
8179/*===========================================================================
8180 * FUNCTION : dumpMetadataToFile
8181 *
8182 * DESCRIPTION: Dumps tuning metadata to file system
8183 *
8184 * PARAMETERS :
8185 * @meta : tuning metadata
8186 * @dumpFrameCount : current dump frame count
8187 * @enabled : Enable mask
8188 *
8189 *==========================================================================*/
8190void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8191 uint32_t &dumpFrameCount,
8192 bool enabled,
8193 const char *type,
8194 uint32_t frameNumber)
8195{
8196 //Some sanity checks
8197 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8198 LOGE("Tuning sensor data size bigger than expected %d: %d",
8199 meta.tuning_sensor_data_size,
8200 TUNING_SENSOR_DATA_MAX);
8201 return;
8202 }
8203
8204 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8205 LOGE("Tuning VFE data size bigger than expected %d: %d",
8206 meta.tuning_vfe_data_size,
8207 TUNING_VFE_DATA_MAX);
8208 return;
8209 }
8210
8211 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8212 LOGE("Tuning CPP data size bigger than expected %d: %d",
8213 meta.tuning_cpp_data_size,
8214 TUNING_CPP_DATA_MAX);
8215 return;
8216 }
8217
8218 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8219 LOGE("Tuning CAC data size bigger than expected %d: %d",
8220 meta.tuning_cac_data_size,
8221 TUNING_CAC_DATA_MAX);
8222 return;
8223 }
8224 //
8225
8226 if(enabled){
8227 char timeBuf[FILENAME_MAX];
8228 char buf[FILENAME_MAX];
8229 memset(buf, 0, sizeof(buf));
8230 memset(timeBuf, 0, sizeof(timeBuf));
8231 time_t current_time;
8232 struct tm * timeinfo;
8233 time (&current_time);
8234 timeinfo = localtime (&current_time);
8235 if (timeinfo != NULL) {
8236 strftime (timeBuf, sizeof(timeBuf),
8237 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8238 }
8239 String8 filePath(timeBuf);
8240 snprintf(buf,
8241 sizeof(buf),
8242 "%dm_%s_%d.bin",
8243 dumpFrameCount,
8244 type,
8245 frameNumber);
8246 filePath.append(buf);
8247 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8248 if (file_fd >= 0) {
8249 ssize_t written_len = 0;
8250 meta.tuning_data_version = TUNING_DATA_VERSION;
8251 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8252 written_len += write(file_fd, data, sizeof(uint32_t));
8253 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8254 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8255 written_len += write(file_fd, data, sizeof(uint32_t));
8256 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8257 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8258 written_len += write(file_fd, data, sizeof(uint32_t));
8259 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8260 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8261 written_len += write(file_fd, data, sizeof(uint32_t));
8262 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8263 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8264 written_len += write(file_fd, data, sizeof(uint32_t));
8265 meta.tuning_mod3_data_size = 0;
8266 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8267 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8268 written_len += write(file_fd, data, sizeof(uint32_t));
8269 size_t total_size = meta.tuning_sensor_data_size;
8270 data = (void *)((uint8_t *)&meta.data);
8271 written_len += write(file_fd, data, total_size);
8272 total_size = meta.tuning_vfe_data_size;
8273 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8274 written_len += write(file_fd, data, total_size);
8275 total_size = meta.tuning_cpp_data_size;
8276 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8277 written_len += write(file_fd, data, total_size);
8278 total_size = meta.tuning_cac_data_size;
8279 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8280 written_len += write(file_fd, data, total_size);
8281 close(file_fd);
8282        } else {
8283 LOGE("fail to open file for metadata dumping");
8284 }
8285 }
8286}
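/*
 * On-disk layout of the tuning dump written above, as implied by the write
 * sequence (sizes in bytes; offsets into meta.data come from the
 * TUNING_*_DATA_OFFSET constants):
 *   uint32_t tuning_data_version
 *   uint32_t tuning_sensor_data_size
 *   uint32_t tuning_vfe_data_size
 *   uint32_t tuning_cpp_data_size
 *   uint32_t tuning_cac_data_size
 *   uint32_t tuning_mod3_data_size   (always written as 0)
 *   sensor payload (tuning_sensor_data_size bytes)
 *   VFE payload    (tuning_vfe_data_size bytes)
 *   CPP payload    (tuning_cpp_data_size bytes)
 *   CAC payload    (tuning_cac_data_size bytes)
 * File name pattern:
 *   <QCAMERA_DUMP_FRM_LOCATION><YYYYmmddHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin
 */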
8287
8288/*===========================================================================
8289 * FUNCTION : cleanAndSortStreamInfo
8290 *
8291 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8292 * and sort them such that raw stream is at the end of the list
8293 * This is a workaround for camera daemon constraint.
8294 *
8295 * PARAMETERS : None
8296 *
8297 *==========================================================================*/
8298void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8299{
8300 List<stream_info_t *> newStreamInfo;
8301
8302 /*clean up invalid streams*/
8303 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8304 it != mStreamInfo.end();) {
8305 if(((*it)->status) == INVALID){
8306 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8307 delete channel;
8308 free(*it);
8309 it = mStreamInfo.erase(it);
8310 } else {
8311 it++;
8312 }
8313 }
8314
8315 // Move preview/video/callback/snapshot streams into newList
8316 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8317 it != mStreamInfo.end();) {
8318 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8319 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8320 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8321 newStreamInfo.push_back(*it);
8322 it = mStreamInfo.erase(it);
8323 } else
8324 it++;
8325 }
8326 // Move raw streams into newList
8327 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8328 it != mStreamInfo.end();) {
8329 newStreamInfo.push_back(*it);
8330 it = mStreamInfo.erase(it);
8331 }
8332
8333 mStreamInfo = newStreamInfo;
8334}
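/*
 * Illustrative ordering (hypothetical stream set): if mStreamInfo initially holds
 * { RAW16, IMPLEMENTATION_DEFINED preview, BLOB snapshot }, the two passes above
 * leave it as { IMPLEMENTATION_DEFINED preview, BLOB snapshot, RAW16 }: non-raw
 * streams keep their relative order and raw streams are moved to the tail.
 */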
8335
8336/*===========================================================================
8337 * FUNCTION : extractJpegMetadata
8338 *
8339 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8340 *              JPEG metadata is cached in HAL, and returned as part of the capture
8341 *              result when metadata is returned from the camera daemon.
8342 *
8343 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8344 * @request: capture request
8345 *
8346 *==========================================================================*/
8347void QCamera3HardwareInterface::extractJpegMetadata(
8348 CameraMetadata& jpegMetadata,
8349 const camera3_capture_request_t *request)
8350{
8351 CameraMetadata frame_settings;
8352 frame_settings = request->settings;
8353
8354 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8355 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8356 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8357 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8358
8359 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8360 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8361 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8362 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8363
8364 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8365 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8366 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8367 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8368
8369 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8370 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8371 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8372 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8373
8374 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8375 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8376 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8377 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8378
8379 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8380 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8381 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8382 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8383
8384 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8385 int32_t thumbnail_size[2];
8386 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8387 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8388 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8389 int32_t orientation =
8390 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008391 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008392 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8393 int32_t temp;
8394 temp = thumbnail_size[0];
8395 thumbnail_size[0] = thumbnail_size[1];
8396 thumbnail_size[1] = temp;
8397 }
8398 }
8399 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8400 thumbnail_size,
8401 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8402 }
8403
8404}
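/*
 * Illustrative example (hypothetical request values): with ANDROID_JPEG_ORIENTATION = 90,
 * ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240} and needJpegExifRotation() returning false,
 * the cached jpegMetadata carries a thumbnail size of {240, 320}, since the dimensions
 * are swapped for 90/270 degree rotations that are applied to the image itself.
 */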
8405
8406/*===========================================================================
8407 * FUNCTION : convertToRegions
8408 *
8409 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8410 *
8411 * PARAMETERS :
8412 * @rect : cam_rect_t struct to convert
8413 * @region : int32_t destination array
8414 * @weight : if we are converting from cam_area_t, weight is valid
8415 * else weight = -1
8416 *
8417 *==========================================================================*/
8418void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8419 int32_t *region, int weight)
8420{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008421 region[FACE_LEFT] = rect.left;
8422 region[FACE_TOP] = rect.top;
8423 region[FACE_RIGHT] = rect.left + rect.width;
8424 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008425 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008426 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008427 }
8428}
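/*
 * Worked example (hypothetical values): rect = { left = 100, top = 200, width = 300,
 * height = 400 } with weight = 1 produces region = [ 100, 200, 400, 600, 1 ], i.e.
 * [ left, top, right, bottom, weight ] with right = left + width and
 * bottom = top + height.
 */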
8429
8430/*===========================================================================
8431 * FUNCTION : convertFromRegions
8432 *
8433 * DESCRIPTION: helper method to convert a region array from frame settings into cam_area_t
8434 *
8435 * PARAMETERS :
8436 *   @roi            : destination cam_area_t to populate
8437 *   @frame_settings : capture request settings that contain the region tag
8438 *   @tag            : metadata tag holding [x_min, y_min, x_max, y_max, weight]
8439 *
8440 *
8441 *==========================================================================*/
8442void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008443 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008444{
Thierry Strudel3d639192016-09-09 11:52:26 -07008445 int32_t x_min = frame_settings.find(tag).data.i32[0];
8446 int32_t y_min = frame_settings.find(tag).data.i32[1];
8447 int32_t x_max = frame_settings.find(tag).data.i32[2];
8448 int32_t y_max = frame_settings.find(tag).data.i32[3];
8449 roi.weight = frame_settings.find(tag).data.i32[4];
8450 roi.rect.left = x_min;
8451 roi.rect.top = y_min;
8452 roi.rect.width = x_max - x_min;
8453 roi.rect.height = y_max - y_min;
8454}
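/*
 * Worked example (hypothetical tag payload): if frame_settings holds
 * [ 100, 200, 400, 600, 1 ] for the given tag, the resulting roi is
 * rect = { left = 100, top = 200, width = 300, height = 400 } with weight = 1,
 * the inverse of the convertToRegions() mapping above.
 */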
8455
8456/*===========================================================================
8457 * FUNCTION : resetIfNeededROI
8458 *
8459 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8460 * crop region
8461 *
8462 * PARAMETERS :
8463 * @roi : cam_area_t struct to resize
8464 * @scalerCropRegion : cam_crop_region_t region to compare against
8465 *
8466 *
8467 *==========================================================================*/
8468bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8469 const cam_crop_region_t* scalerCropRegion)
8470{
8471 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8472 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8473 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8474 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8475
8476    /* According to the spec, weight = 0 indicates that the ROI must be disabled.
8477     * Without this check, the validation below (whether the ROI lies inside the
8478     * scaler crop region) would fail, the ROI would not be reset, and the
8479     * algorithm would keep using a stale ROI window.
8480     */
8481 if (roi->weight == 0) {
8482 return true;
8483 }
8484
8485 if ((roi_x_max < scalerCropRegion->left) ||
8486            // right edge of roi window is left of scaler crop's left edge
8487        (roi_y_max < scalerCropRegion->top) ||
8488            // bottom edge of roi window is above scaler crop's top edge
8489        (roi->rect.left > crop_x_max) ||
8490            // left edge of roi window is beyond (right of) scaler crop's right edge
8491        (roi->rect.top > crop_y_max)){
8492        // top edge of roi window is below scaler crop's bottom edge
8493 return false;
8494 }
8495 if (roi->rect.left < scalerCropRegion->left) {
8496 roi->rect.left = scalerCropRegion->left;
8497 }
8498 if (roi->rect.top < scalerCropRegion->top) {
8499 roi->rect.top = scalerCropRegion->top;
8500 }
8501 if (roi_x_max > crop_x_max) {
8502 roi_x_max = crop_x_max;
8503 }
8504 if (roi_y_max > crop_y_max) {
8505 roi_y_max = crop_y_max;
8506 }
8507 roi->rect.width = roi_x_max - roi->rect.left;
8508 roi->rect.height = roi_y_max - roi->rect.top;
8509 return true;
8510}
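/*
 * Worked examples (hypothetical values, scaler crop = { left 0, top 0, 2000 x 1500 }):
 *  - roi { left 1900, top 1400, 300 x 300, weight 1 } overlaps the crop, so it is
 *    clamped to { 1900, 1400, 100 x 100 } and the function returns true.
 *  - roi { left 2100, top 100, 50 x 50, weight 1 } lies entirely to the right of the
 *    crop, so the function returns false.
 *  - weight == 0 returns true immediately and leaves the ROI untouched, so the
 *    caller can treat it as disabled.
 */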
8511
8512/*===========================================================================
8513 * FUNCTION : convertLandmarks
8514 *
8515 * DESCRIPTION: helper method to extract the landmarks from face detection info
8516 *
8517 * PARAMETERS :
8518 * @landmark_data : input landmark data to be converted
8519 * @landmarks : int32_t destination array
8520 *
8521 *
8522 *==========================================================================*/
8523void QCamera3HardwareInterface::convertLandmarks(
8524 cam_face_landmarks_info_t landmark_data,
8525 int32_t *landmarks)
8526{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008527 if (landmark_data.is_left_eye_valid) {
8528 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8529 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8530 } else {
8531 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8532 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8533 }
8534
8535 if (landmark_data.is_right_eye_valid) {
8536 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8537 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8538 } else {
8539 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8540 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8541 }
8542
8543 if (landmark_data.is_mouth_valid) {
8544 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8545 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8546 } else {
8547 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8548 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8549 }
8550}
8551
8552/*===========================================================================
8553 * FUNCTION : setInvalidLandmarks
8554 *
8555 * DESCRIPTION: helper method to set invalid landmarks
8556 *
8557 * PARAMETERS :
8558 * @landmarks : int32_t destination array
8559 *
8560 *
8561 *==========================================================================*/
8562void QCamera3HardwareInterface::setInvalidLandmarks(
8563 int32_t *landmarks)
8564{
8565 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8566 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8567 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8568 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8569 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8570 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008571}
8572
8573#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008574
8575/*===========================================================================
8576 * FUNCTION : getCapabilities
8577 *
8578 * DESCRIPTION: query camera capability from back-end
8579 *
8580 * PARAMETERS :
8581 * @ops : mm-interface ops structure
8582 * @cam_handle : camera handle for which we need capability
8583 *
8584 * RETURN : ptr type of capability structure
8585 * capability for success
8586 * NULL for failure
8587 *==========================================================================*/
8588cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8589 uint32_t cam_handle)
8590{
8591 int rc = NO_ERROR;
8592 QCamera3HeapMemory *capabilityHeap = NULL;
8593 cam_capability_t *cap_ptr = NULL;
8594
8595 if (ops == NULL) {
8596 LOGE("Invalid arguments");
8597 return NULL;
8598 }
8599
8600 capabilityHeap = new QCamera3HeapMemory(1);
8601 if (capabilityHeap == NULL) {
8602 LOGE("creation of capabilityHeap failed");
8603 return NULL;
8604 }
8605
8606 /* Allocate memory for capability buffer */
8607 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8608 if(rc != OK) {
8609 LOGE("No memory for cappability");
8610 goto allocate_failed;
8611 }
8612
8613 /* Map memory for capability buffer */
8614 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8615
8616 rc = ops->map_buf(cam_handle,
8617 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8618 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8619 if(rc < 0) {
8620 LOGE("failed to map capability buffer");
8621 rc = FAILED_TRANSACTION;
8622 goto map_failed;
8623 }
8624
8625 /* Query Capability */
8626 rc = ops->query_capability(cam_handle);
8627 if(rc < 0) {
8628 LOGE("failed to query capability");
8629 rc = FAILED_TRANSACTION;
8630 goto query_failed;
8631 }
8632
8633 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8634 if (cap_ptr == NULL) {
8635 LOGE("out of memory");
8636 rc = NO_MEMORY;
8637 goto query_failed;
8638 }
8639
8640 memset(cap_ptr, 0, sizeof(cam_capability_t));
8641 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8642
8643 int index;
8644 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8645 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8646 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8647 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8648 }
8649
8650query_failed:
8651 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8652map_failed:
8653 capabilityHeap->deallocate();
8654allocate_failed:
8655 delete capabilityHeap;
8656
8657 if (rc != NO_ERROR) {
8658 return NULL;
8659 } else {
8660 return cap_ptr;
8661 }
8662}
8663
Thierry Strudel3d639192016-09-09 11:52:26 -07008664/*===========================================================================
8665 * FUNCTION : initCapabilities
8666 *
8667 * DESCRIPTION: initialize camera capabilities in static data struct
8668 *
8669 * PARAMETERS :
8670 * @cameraId : camera Id
8671 *
8672 * RETURN : int32_t type of status
8673 * NO_ERROR -- success
8674 * none-zero failure code
8675 *==========================================================================*/
8676int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8677{
8678 int rc = 0;
8679 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008680 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008681
8682 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8683 if (rc) {
8684 LOGE("camera_open failed. rc = %d", rc);
8685 goto open_failed;
8686 }
8687 if (!cameraHandle) {
8688 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8689 goto open_failed;
8690 }
8691
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008692 handle = get_main_camera_handle(cameraHandle->camera_handle);
8693 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8694 if (gCamCapability[cameraId] == NULL) {
8695 rc = FAILED_TRANSACTION;
8696 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008697 }
8698
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008699 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008700 if (is_dual_camera_by_idx(cameraId)) {
8701 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8702 gCamCapability[cameraId]->aux_cam_cap =
8703 getCapabilities(cameraHandle->ops, handle);
8704 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8705 rc = FAILED_TRANSACTION;
8706 free(gCamCapability[cameraId]);
8707 goto failed_op;
8708 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008709
8710 // Copy the main camera capability to main_cam_cap struct
8711 gCamCapability[cameraId]->main_cam_cap =
8712 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8713 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8714 LOGE("out of memory");
8715 rc = NO_MEMORY;
8716 goto failed_op;
8717 }
8718 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8719 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008720 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008721failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008722 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8723 cameraHandle = NULL;
8724open_failed:
8725 return rc;
8726}
8727
8728/*==========================================================================
8729 * FUNCTION : get3Aversion
8730 *
8731 * DESCRIPTION: get the Q3A S/W version
8732 *
8733 * PARAMETERS :
8734 * @sw_version: Reference of Q3A structure which will hold version info upon
8735 * return
8736 *
8737 * RETURN : None
8738 *
8739 *==========================================================================*/
8740void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8741{
8742 if(gCamCapability[mCameraId])
8743 sw_version = gCamCapability[mCameraId]->q3a_version;
8744 else
8745 LOGE("Capability structure NULL!");
8746}
8747
8748
8749/*===========================================================================
8750 * FUNCTION : initParameters
8751 *
8752 * DESCRIPTION: initialize camera parameters
8753 *
8754 * PARAMETERS :
8755 *
8756 * RETURN : int32_t type of status
8757 * NO_ERROR -- success
8758 * none-zero failure code
8759 *==========================================================================*/
8760int QCamera3HardwareInterface::initParameters()
8761{
8762 int rc = 0;
8763
8764 //Allocate Set Param Buffer
8765 mParamHeap = new QCamera3HeapMemory(1);
8766 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8767 if(rc != OK) {
8768 rc = NO_MEMORY;
8769 LOGE("Failed to allocate SETPARM Heap memory");
8770 delete mParamHeap;
8771 mParamHeap = NULL;
8772 return rc;
8773 }
8774
8775 //Map memory for parameters buffer
8776 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8777 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8778 mParamHeap->getFd(0),
8779 sizeof(metadata_buffer_t),
8780 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8781 if(rc < 0) {
8782 LOGE("failed to map SETPARM buffer");
8783 rc = FAILED_TRANSACTION;
8784 mParamHeap->deallocate();
8785 delete mParamHeap;
8786 mParamHeap = NULL;
8787 return rc;
8788 }
8789
8790 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8791
8792 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8793 return rc;
8794}
8795
8796/*===========================================================================
8797 * FUNCTION : deinitParameters
8798 *
8799 * DESCRIPTION: de-initialize camera parameters
8800 *
8801 * PARAMETERS :
8802 *
8803 * RETURN : NONE
8804 *==========================================================================*/
8805void QCamera3HardwareInterface::deinitParameters()
8806{
8807 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8808 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8809
8810 mParamHeap->deallocate();
8811 delete mParamHeap;
8812 mParamHeap = NULL;
8813
8814 mParameters = NULL;
8815
8816 free(mPrevParameters);
8817 mPrevParameters = NULL;
8818}
8819
8820/*===========================================================================
8821 * FUNCTION : calcMaxJpegSize
8822 *
8823 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8824 *
8825 * PARAMETERS :
8826 *   @camera_id : camera Id
8827 * RETURN : max_jpeg_size
8828 *==========================================================================*/
8829size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8830{
8831 size_t max_jpeg_size = 0;
8832 size_t temp_width, temp_height;
8833 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8834 MAX_SIZES_CNT);
8835 for (size_t i = 0; i < count; i++) {
8836 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8837 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8838 if (temp_width * temp_height > max_jpeg_size ) {
8839 max_jpeg_size = temp_width * temp_height;
8840 }
8841 }
8842 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8843 return max_jpeg_size;
8844}
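/*
 * Worked example (hypothetical picture size table): if the largest entry is 4000x3000,
 * then max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *                    = 18000000 bytes plus the blob struct, i.e. the worst-case
 * YUV 4:2:0 footprint of the largest picture size plus the JPEG blob trailer.
 */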
8845
8846/*===========================================================================
8847 * FUNCTION : getMaxRawSize
8848 *
8849 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8850 *
8851 * PARAMETERS :
8852 *   @camera_id : camera Id
8853 * RETURN : Largest supported Raw Dimension
8854 *==========================================================================*/
8855cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8856{
8857 int max_width = 0;
8858 cam_dimension_t maxRawSize;
8859
8860 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8861 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8862 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8863 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8864 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8865 }
8866 }
8867 return maxRawSize;
8868}
8869
8870
8871/*===========================================================================
8872 * FUNCTION : calcMaxJpegDim
8873 *
8874 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8875 *
8876 * PARAMETERS :
8877 *
8878 * RETURN : max_jpeg_dim
8879 *==========================================================================*/
8880cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8881{
8882 cam_dimension_t max_jpeg_dim;
8883 cam_dimension_t curr_jpeg_dim;
8884 max_jpeg_dim.width = 0;
8885 max_jpeg_dim.height = 0;
8886 curr_jpeg_dim.width = 0;
8887 curr_jpeg_dim.height = 0;
8888 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8889 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8890 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8891 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8892 max_jpeg_dim.width * max_jpeg_dim.height ) {
8893 max_jpeg_dim.width = curr_jpeg_dim.width;
8894 max_jpeg_dim.height = curr_jpeg_dim.height;
8895 }
8896 }
8897 return max_jpeg_dim;
8898}
8899
8900/*===========================================================================
8901 * FUNCTION : addStreamConfig
8902 *
8903 * DESCRIPTION: adds the stream configuration to the array
8904 *
8905 * PARAMETERS :
8906 * @available_stream_configs : pointer to stream configuration array
8907 * @scalar_format : scalar format
8908 * @dim : configuration dimension
8909 * @config_type : input or output configuration type
8910 *
8911 * RETURN : NONE
8912 *==========================================================================*/
8913void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8914 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8915{
8916 available_stream_configs.add(scalar_format);
8917 available_stream_configs.add(dim.width);
8918 available_stream_configs.add(dim.height);
8919 available_stream_configs.add(config_type);
8920}
8921
8922/*===========================================================================
8923 * FUNCTION : suppportBurstCapture
8924 *
8925 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8926 *
8927 * PARAMETERS :
8928 * @cameraId : camera Id
8929 *
8930 * RETURN : true if camera supports BURST_CAPTURE
8931 * false otherwise
8932 *==========================================================================*/
8933bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8934{
8935 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8936 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8937 const int32_t highResWidth = 3264;
8938 const int32_t highResHeight = 2448;
8939
8940 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8941 // Maximum resolution images cannot be captured at >= 10fps
8942 // -> not supporting BURST_CAPTURE
8943 return false;
8944 }
8945
8946 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8947 // Maximum resolution images can be captured at >= 20fps
8948 // --> supporting BURST_CAPTURE
8949 return true;
8950 }
8951
8952 // Find the smallest highRes resolution, or largest resolution if there is none
8953 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8954 MAX_SIZES_CNT);
8955 size_t highRes = 0;
8956 while ((highRes + 1 < totalCnt) &&
8957 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8958 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8959 highResWidth * highResHeight)) {
8960 highRes++;
8961 }
8962 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8963 return true;
8964 } else {
8965 return false;
8966 }
8967}
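/*
 * Decision summary for the bounds above (durations from picture_min_duration[]):
 *  - largest picture size needs > 100 ms  -> BURST_CAPTURE not supported
 *  - largest picture size needs <= 50 ms  -> BURST_CAPTURE supported
 *  - otherwise, supported only if the smallest resolution that is still
 *    >= 3264x2448 can be captured with a min duration <= 50 ms.
 * Example (hypothetical table): 80 ms at 5344x4008 and 40 ms at 3264x2448 means the
 * highRes search lands on 3264x2448 and burst capture is reported as supported.
 */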
8968
8969/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008970 * FUNCTION : getPDStatIndex
8971 *
8972 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8973 *
8974 * PARAMETERS :
8975 * @caps : camera capabilities
8976 *
8977 * RETURN : int32_t type
8978 * non-negative - on success
8979 * -1 - on failure
8980 *==========================================================================*/
8981int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8982 if (nullptr == caps) {
8983 return -1;
8984 }
8985
8986 uint32_t metaRawCount = caps->meta_raw_channel_count;
8987 int32_t ret = -1;
8988 for (size_t i = 0; i < metaRawCount; i++) {
8989 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8990 ret = i;
8991 break;
8992 }
8993 }
8994
8995 return ret;
8996}
8997
8998/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008999 * FUNCTION : initStaticMetadata
9000 *
9001 * DESCRIPTION: initialize the static metadata
9002 *
9003 * PARAMETERS :
9004 * @cameraId : camera Id
9005 *
9006 * RETURN : int32_t type of status
9007 * 0 -- success
9008 * non-zero failure code
9009 *==========================================================================*/
9010int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9011{
9012 int rc = 0;
9013 CameraMetadata staticInfo;
9014 size_t count = 0;
9015 bool limitedDevice = false;
9016 char prop[PROPERTY_VALUE_MAX];
9017 bool supportBurst = false;
9018
9019 supportBurst = supportBurstCapture(cameraId);
9020
9021    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
9022     * guaranteed, or if the min fps at max resolution is less than 20 fps, the camera
9023     * is advertised as a limited device */
9024 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9025 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9026 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9027 !supportBurst;
9028
9029 uint8_t supportedHwLvl = limitedDevice ?
9030 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009031#ifndef USE_HAL_3_3
9032 // LEVEL_3 - This device will support level 3.
9033 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9034#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009035 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009036#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009037
9038 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9039 &supportedHwLvl, 1);
9040
9041 bool facingBack = false;
9042 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9043 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9044 facingBack = true;
9045 }
9046 /*HAL 3 only*/
9047 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9048 &gCamCapability[cameraId]->min_focus_distance, 1);
9049
9050 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9051 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9052
9053 /*should be using focal lengths but sensor doesn't provide that info now*/
9054 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9055 &gCamCapability[cameraId]->focal_length,
9056 1);
9057
9058 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9059 gCamCapability[cameraId]->apertures,
9060 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9061
9062 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9063 gCamCapability[cameraId]->filter_densities,
9064 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9065
9066
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009067 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9068 size_t mode_count =
9069 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9070 for (size_t i = 0; i < mode_count; i++) {
9071 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009073 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009074 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009075
9076 int32_t lens_shading_map_size[] = {
9077 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9078 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9079 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9080 lens_shading_map_size,
9081 sizeof(lens_shading_map_size)/sizeof(int32_t));
9082
9083 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9084 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9085
9086 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9087 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9088
9089 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9090 &gCamCapability[cameraId]->max_frame_duration, 1);
9091
9092 camera_metadata_rational baseGainFactor = {
9093 gCamCapability[cameraId]->base_gain_factor.numerator,
9094 gCamCapability[cameraId]->base_gain_factor.denominator};
9095 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9096 &baseGainFactor, 1);
9097
9098 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9099 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9100
9101 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9102 gCamCapability[cameraId]->pixel_array_size.height};
9103 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9104 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9105
9106 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9107 gCamCapability[cameraId]->active_array_size.top,
9108 gCamCapability[cameraId]->active_array_size.width,
9109 gCamCapability[cameraId]->active_array_size.height};
9110 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9111 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9112
9113 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9114 &gCamCapability[cameraId]->white_level, 1);
9115
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009116 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9117 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9118 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009119 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009120 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009121
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009122#ifndef USE_HAL_3_3
9123 bool hasBlackRegions = false;
9124 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9125 LOGW("black_region_count: %d is bounded to %d",
9126 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9127 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9128 }
9129 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9130 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9131 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9132 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9133 }
9134 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9135 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9136 hasBlackRegions = true;
9137 }
9138#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009139 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9140 &gCamCapability[cameraId]->flash_charge_duration, 1);
9141
9142 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9143 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9144
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009145 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9146 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9147 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009148 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9149 &timestampSource, 1);
9150
Thierry Strudel54dc9782017-02-15 12:12:10 -08009151 //update histogram vendor data
9152 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009153 &gCamCapability[cameraId]->histogram_size, 1);
9154
Thierry Strudel54dc9782017-02-15 12:12:10 -08009155 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009156 &gCamCapability[cameraId]->max_histogram_count, 1);
9157
Shuzhen Wang14415f52016-11-16 18:26:18 -08009158 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9159 //so that app can request fewer number of bins than the maximum supported.
9160 std::vector<int32_t> histBins;
9161 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9162 histBins.push_back(maxHistBins);
9163 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9164 (maxHistBins & 0x1) == 0) {
9165 histBins.push_back(maxHistBins >> 1);
9166 maxHistBins >>= 1;
9167 }
9168 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9169 histBins.data(), histBins.size());
9170
Thierry Strudel3d639192016-09-09 11:52:26 -07009171 int32_t sharpness_map_size[] = {
9172 gCamCapability[cameraId]->sharpness_map_size.width,
9173 gCamCapability[cameraId]->sharpness_map_size.height};
9174
9175 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9176 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9177
9178 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9179 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9180
Emilian Peev0f3c3162017-03-15 12:57:46 +00009181 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9182 if (0 <= indexPD) {
9183 // Advertise PD stats data as part of the Depth capabilities
9184 int32_t depthWidth =
9185 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9186 int32_t depthHeight =
9187 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9188 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9189 assert(0 < depthSamplesCount);
9190 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9191 &depthSamplesCount, 1);
9192
9193 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9194 depthHeight,
9195 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9196 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9197 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9198 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9199 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9200
9201 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9202 depthHeight, 33333333,
9203 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9204 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9205 depthMinDuration,
9206 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9207
9208 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9209 depthHeight, 0,
9210 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9211 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9212 depthStallDuration,
9213 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9214
9215 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9216 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9217 }
9218
Thierry Strudel3d639192016-09-09 11:52:26 -07009219 int32_t scalar_formats[] = {
9220 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9221 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9222 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9223 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9224 HAL_PIXEL_FORMAT_RAW10,
9225 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009226 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9227 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9228 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009229
9230 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9231 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9232 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9233 count, MAX_SIZES_CNT, available_processed_sizes);
9234 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9235 available_processed_sizes, count * 2);
9236
9237 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9238 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9239 makeTable(gCamCapability[cameraId]->raw_dim,
9240 count, MAX_SIZES_CNT, available_raw_sizes);
9241 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9242 available_raw_sizes, count * 2);
9243
9244 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9245 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9246 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9247 count, MAX_SIZES_CNT, available_fps_ranges);
9248 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9249 available_fps_ranges, count * 2);
9250
9251 camera_metadata_rational exposureCompensationStep = {
9252 gCamCapability[cameraId]->exp_compensation_step.numerator,
9253 gCamCapability[cameraId]->exp_compensation_step.denominator};
9254 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9255 &exposureCompensationStep, 1);
9256
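/* Advertise VIDEO_STABILIZATION_MODE_ON only when the sensor reports EIS 2.0 or 3.0 support,
   the persist.camera.eis.enable property is set, and this is the back-facing camera. */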
9257 Vector<uint8_t> availableVstabModes;
9258 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9259 char eis_prop[PROPERTY_VALUE_MAX];
9260 bool eisSupported = false;
9261 memset(eis_prop, 0, sizeof(eis_prop));
9262 property_get("persist.camera.eis.enable", eis_prop, "1");
9263 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9264 count = IS_TYPE_MAX;
9265 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9266 for (size_t i = 0; i < count; i++) {
9267 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9268 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9269 eisSupported = true;
9270 break;
9271 }
9272 }
9273 if (facingBack && eis_prop_set && eisSupported) {
9274 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9275 }
9276 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9277 availableVstabModes.array(), availableVstabModes.size());
9278
9279 /*HAL 1 and HAL 3 common*/
9280 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9281 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9282 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9283 // Cap the max zoom to the max preferred value
9284 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9285 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9286 &maxZoom, 1);
9287
9288 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9289 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9290
9291 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9292 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9293 max3aRegions[2] = 0; /* AF not supported */
9294 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9295 max3aRegions, 3);
9296
9297 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9298 memset(prop, 0, sizeof(prop));
9299 property_get("persist.camera.facedetect", prop, "1");
9300 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9301 LOGD("Support face detection mode: %d",
9302 supportedFaceDetectMode);
9303
9304 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9305 /* supported mode should be OFF if the max number of faces is 0 */
9306 if (maxFaces <= 0) {
9307 supportedFaceDetectMode = 0;
9308 }
9309 Vector<uint8_t> availableFaceDetectModes;
9310 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9311 if (supportedFaceDetectMode == 1) {
9312 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9313 } else if (supportedFaceDetectMode == 2) {
9314 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9315 } else if (supportedFaceDetectMode == 3) {
9316 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9317 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9318 } else {
9319 maxFaces = 0;
9320 }
9321 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9322 availableFaceDetectModes.array(),
9323 availableFaceDetectModes.size());
9324 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9325 (int32_t *)&maxFaces, 1);
9326 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9327 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9328 &face_bsgc, 1);
9329
9330 int32_t exposureCompensationRange[] = {
9331 gCamCapability[cameraId]->exposure_compensation_min,
9332 gCamCapability[cameraId]->exposure_compensation_max};
9333 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9334 exposureCompensationRange,
9335 sizeof(exposureCompensationRange)/sizeof(int32_t));
9336
9337 uint8_t lensFacing = (facingBack) ?
9338 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9339 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9340
9341 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9342 available_thumbnail_sizes,
9343 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9344
9345 /*all sizes will be clubbed into this tag*/
9346 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9347 /*android.scaler.availableStreamConfigurations*/
9348 Vector<int32_t> available_stream_configs;
9349 cam_dimension_t active_array_dim;
9350 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9351 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9352
9353 /* Advertise the list of supported input dimensions based on the property below.
9354 By default all sizes up to 5MP will be advertised.
9355 Note that the setprop resolution format should be WxH,
9356 e.g.: adb shell setprop persist.camera.input.minsize 1280x720.
9357 To list all supported sizes, set the property to "0x0". */
9358 cam_dimension_t minInputSize = {2592,1944}; //5MP
9359 memset(prop, 0, sizeof(prop));
9360 property_get("persist.camera.input.minsize", prop, "2592x1944");
9361 if (strlen(prop) > 0) {
9362 char *saveptr = NULL;
9363 char *token = strtok_r(prop, "x", &saveptr);
9364 if (token != NULL) {
9365 minInputSize.width = atoi(token);
9366 }
9367 token = strtok_r(NULL, "x", &saveptr);
9368 if (token != NULL) {
9369 minInputSize.height = atoi(token);
9370 }
9371 }
9372
9373 /* Add input/output stream configurations for each scalar format */
9374 for (size_t j = 0; j < scalar_formats_count; j++) {
9375 switch (scalar_formats[j]) {
9376 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9377 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9378 case HAL_PIXEL_FORMAT_RAW10:
9379 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9380 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9381 addStreamConfig(available_stream_configs, scalar_formats[j],
9382 gCamCapability[cameraId]->raw_dim[i],
9383 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9384 }
9385 break;
9386 case HAL_PIXEL_FORMAT_BLOB:
9387 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9388 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9389 addStreamConfig(available_stream_configs, scalar_formats[j],
9390 gCamCapability[cameraId]->picture_sizes_tbl[i],
9391 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9392 }
9393 break;
9394 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9395 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9396 default:
9397 cam_dimension_t largest_picture_size;
9398 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9399 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9400 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9401 addStreamConfig(available_stream_configs, scalar_formats[j],
9402 gCamCapability[cameraId]->picture_sizes_tbl[i],
9403 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9404 /* For the two formats below we also support input streams for reprocessing; advertise those */
9405 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9406 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9407 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9408 >= minInputSize.width) || (gCamCapability[cameraId]->
9409 picture_sizes_tbl[i].height >= minInputSize.height)) {
9410 addStreamConfig(available_stream_configs, scalar_formats[j],
9411 gCamCapability[cameraId]->picture_sizes_tbl[i],
9412 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9413 }
9414 }
9415 }
9416
9417 break;
9418 }
9419 }
9420
9421 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9422 available_stream_configs.array(), available_stream_configs.size());
9423 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9424 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9425
9426 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9427 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9428
9429 /* android.scaler.availableMinFrameDurations */
9430 Vector<int64_t> available_min_durations;
9431 for (size_t j = 0; j < scalar_formats_count; j++) {
9432 switch (scalar_formats[j]) {
9433 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9434 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9435 case HAL_PIXEL_FORMAT_RAW10:
9436 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9437 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9438 available_min_durations.add(scalar_formats[j]);
9439 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9440 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9441 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9442 }
9443 break;
9444 default:
9445 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9446 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9447 available_min_durations.add(scalar_formats[j]);
9448 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9449 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9450 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9451 }
9452 break;
9453 }
9454 }
9455 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9456 available_min_durations.array(), available_min_durations.size());
9457
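/* Translate the HFR table into HIGH_SPEED_VIDEO_CONFIGURATIONS entries; each entry added
   below is (width, height, fps_min, fps_max, batch_size_max). */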
9458 Vector<int32_t> available_hfr_configs;
9459 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9460 int32_t fps = 0;
9461 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9462 case CAM_HFR_MODE_60FPS:
9463 fps = 60;
9464 break;
9465 case CAM_HFR_MODE_90FPS:
9466 fps = 90;
9467 break;
9468 case CAM_HFR_MODE_120FPS:
9469 fps = 120;
9470 break;
9471 case CAM_HFR_MODE_150FPS:
9472 fps = 150;
9473 break;
9474 case CAM_HFR_MODE_180FPS:
9475 fps = 180;
9476 break;
9477 case CAM_HFR_MODE_210FPS:
9478 fps = 210;
9479 break;
9480 case CAM_HFR_MODE_240FPS:
9481 fps = 240;
9482 break;
9483 case CAM_HFR_MODE_480FPS:
9484 fps = 480;
9485 break;
9486 case CAM_HFR_MODE_OFF:
9487 case CAM_HFR_MODE_MAX:
9488 default:
9489 break;
9490 }
9491
9492 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9493 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9494 /* For each HFR frame rate, need to advertise one variable fps range
9495 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9496 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9497 * set by the app. When video recording is started, [120, 120] is
9498 * set. This way sensor configuration does not change when recording
9499 * is started */
9500
9501 /* (width, height, fps_min, fps_max, batch_size_max) */
9502 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9503 j < MAX_SIZES_CNT; j++) {
9504 available_hfr_configs.add(
9505 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9506 available_hfr_configs.add(
9507 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9508 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9509 available_hfr_configs.add(fps);
9510 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9511
9512 /* (width, height, fps_min, fps_max, batch_size_max) */
9513 available_hfr_configs.add(
9514 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9515 available_hfr_configs.add(
9516 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9517 available_hfr_configs.add(fps);
9518 available_hfr_configs.add(fps);
9519 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9520 }
9521 }
9522 }
9523 //Advertise HFR capability only if the property is set
9524 memset(prop, 0, sizeof(prop));
9525 property_get("persist.camera.hal3hfr.enable", prop, "1");
9526 uint8_t hfrEnable = (uint8_t)atoi(prop);
9527
9528 if (hfrEnable && available_hfr_configs.array()) {
9529 staticInfo.update(
9530 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9531 available_hfr_configs.array(), available_hfr_configs.size());
9532 }
9533
9534 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9535 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9536 &max_jpeg_size, 1);
9537
9538 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9539 size_t size = 0;
9540 count = CAM_EFFECT_MODE_MAX;
9541 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9542 for (size_t i = 0; i < count; i++) {
9543 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9544 gCamCapability[cameraId]->supported_effects[i]);
9545 if (NAME_NOT_FOUND != val) {
9546 avail_effects[size] = (uint8_t)val;
9547 size++;
9548 }
9549 }
9550 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9551 avail_effects,
9552 size);
9553
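/* Map the HAL scene modes to framework scene modes, skipping CAM_SCENE_MODE_OFF and
   remembering the supported indexes for the override list built below. */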
9554 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9555 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9556 size_t supported_scene_modes_cnt = 0;
9557 count = CAM_SCENE_MODE_MAX;
9558 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9559 for (size_t i = 0; i < count; i++) {
9560 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9561 CAM_SCENE_MODE_OFF) {
9562 int val = lookupFwkName(SCENE_MODES_MAP,
9563 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9564 gCamCapability[cameraId]->supported_scene_modes[i]);
9565
9566 if (NAME_NOT_FOUND != val) {
9567 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9568 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9569 supported_scene_modes_cnt++;
9570 }
9571 }
9572 }
9573 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9574 avail_scene_modes,
9575 supported_scene_modes_cnt);
9576
9577 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9578 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9579 supported_scene_modes_cnt,
9580 CAM_SCENE_MODE_MAX,
9581 scene_mode_overrides,
9582 supported_indexes,
9583 cameraId);
9584
9585 if (supported_scene_modes_cnt == 0) {
9586 supported_scene_modes_cnt = 1;
9587 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9588 }
9589
9590 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9591 scene_mode_overrides, supported_scene_modes_cnt * 3);
9592
9593 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9594 ANDROID_CONTROL_MODE_AUTO,
9595 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9596 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9597 available_control_modes,
9598 3);
9599
9600 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9601 size = 0;
9602 count = CAM_ANTIBANDING_MODE_MAX;
9603 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9604 for (size_t i = 0; i < count; i++) {
9605 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9606 gCamCapability[cameraId]->supported_antibandings[i]);
9607 if (NAME_NOT_FOUND != val) {
9608 avail_antibanding_modes[size] = (uint8_t)val;
9609 size++;
9610 }
9611
9612 }
9613 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9614 avail_antibanding_modes,
9615 size);
9616
9617 uint8_t avail_abberation_modes[] = {
9618 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9619 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9620 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9621 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9622 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9623 if (0 == count) {
9624 // If no aberration correction modes are available for a device, advertise only the OFF mode
9625 size = 1;
9626 } else {
9627 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9628 // so advertise all 3 modes if at least one mode is supported, as per the
9629 // Android M requirement
9630 size = 3;
9631 }
9632 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9633 avail_abberation_modes,
9634 size);
9635
9636 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9637 size = 0;
9638 count = CAM_FOCUS_MODE_MAX;
9639 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9640 for (size_t i = 0; i < count; i++) {
9641 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9642 gCamCapability[cameraId]->supported_focus_modes[i]);
9643 if (NAME_NOT_FOUND != val) {
9644 avail_af_modes[size] = (uint8_t)val;
9645 size++;
9646 }
9647 }
9648 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9649 avail_af_modes,
9650 size);
9651
9652 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9653 size = 0;
9654 count = CAM_WB_MODE_MAX;
9655 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9656 for (size_t i = 0; i < count; i++) {
9657 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9658 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9659 gCamCapability[cameraId]->supported_white_balances[i]);
9660 if (NAME_NOT_FOUND != val) {
9661 avail_awb_modes[size] = (uint8_t)val;
9662 size++;
9663 }
9664 }
9665 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9666 avail_awb_modes,
9667 size);
9668
9669 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9670 count = CAM_FLASH_FIRING_LEVEL_MAX;
9671 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9672 count);
9673 for (size_t i = 0; i < count; i++) {
9674 available_flash_levels[i] =
9675 gCamCapability[cameraId]->supported_firing_levels[i];
9676 }
9677 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9678 available_flash_levels, count);
9679
9680 uint8_t flashAvailable;
9681 if (gCamCapability[cameraId]->flash_available)
9682 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9683 else
9684 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9685 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9686 &flashAvailable, 1);
9687
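/* Advertise AE modes; CAM_AE_MODE_ON_EXTERNAL_FLASH is remapped to the experimental
   framework value, and flash-assisted AE modes are added only when a flash unit is present. */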
9688 Vector<uint8_t> avail_ae_modes;
9689 count = CAM_AE_MODE_MAX;
9690 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9691 for (size_t i = 0; i < count; i++) {
9692 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9693 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9694 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9695 }
9696 avail_ae_modes.add(aeMode);
9697 }
9698 if (flashAvailable) {
9699 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9700 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9701 }
9702 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9703 avail_ae_modes.array(),
9704 avail_ae_modes.size());
9705
9706 int32_t sensitivity_range[2];
9707 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9708 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9709 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9710 sensitivity_range,
9711 sizeof(sensitivity_range) / sizeof(int32_t));
9712
9713 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9714 &gCamCapability[cameraId]->max_analog_sensitivity,
9715 1);
9716
9717 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9718 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9719 &sensor_orientation,
9720 1);
9721
9722 int32_t max_output_streams[] = {
9723 MAX_STALLING_STREAMS,
9724 MAX_PROCESSED_STREAMS,
9725 MAX_RAW_STREAMS};
9726 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9727 max_output_streams,
9728 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9729
9730 uint8_t avail_leds = 0;
9731 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9732 &avail_leds, 0);
9733
9734 uint8_t focus_dist_calibrated;
9735 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9736 gCamCapability[cameraId]->focus_dist_calibrated);
9737 if (NAME_NOT_FOUND != val) {
9738 focus_dist_calibrated = (uint8_t)val;
9739 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9740 &focus_dist_calibrated, 1);
9741 }
9742
9743 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9744 size = 0;
9745 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9746 MAX_TEST_PATTERN_CNT);
9747 for (size_t i = 0; i < count; i++) {
9748 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9749 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9750 if (NAME_NOT_FOUND != testpatternMode) {
9751 avail_testpattern_modes[size] = testpatternMode;
9752 size++;
9753 }
9754 }
9755 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9756 avail_testpattern_modes,
9757 size);
9758
9759 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9760 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9761 &max_pipeline_depth,
9762 1);
9763
9764 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9765 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9766 &partial_result_count,
9767 1);
9768
9769 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9770 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9771
9772 Vector<uint8_t> available_capabilities;
9773 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9774 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9775 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9776 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9777 if (supportBurst) {
9778 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9779 }
9780 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9781 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9782 if (hfrEnable && available_hfr_configs.array()) {
9783 available_capabilities.add(
9784 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9785 }
9786
9787 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9788 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9789 }
9790 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9791 available_capabilities.array(),
9792 available_capabilities.size());
9793
9794 // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9795 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9796 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9797 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9798
9799 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9800 &aeLockAvailable, 1);
9801
9802 // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9803 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9804 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9805 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9806
9807 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9808 &awbLockAvailable, 1);
9809
9810 int32_t max_input_streams = 1;
9811 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9812 &max_input_streams,
9813 1);
9814
9815 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9816 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9817 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9818 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9819 HAL_PIXEL_FORMAT_YCbCr_420_888};
9820 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9821 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9822
9823 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9824 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9825 &max_latency,
9826 1);
9827
9828#ifndef USE_HAL_3_3
9829 int32_t isp_sensitivity_range[2];
9830 isp_sensitivity_range[0] =
9831 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9832 isp_sensitivity_range[1] =
9833 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9834 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9835 isp_sensitivity_range,
9836 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9837#endif
9838
9839 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9840 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9841 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9842 available_hot_pixel_modes,
9843 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9844
9845 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9846 ANDROID_SHADING_MODE_FAST,
9847 ANDROID_SHADING_MODE_HIGH_QUALITY};
9848 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9849 available_shading_modes,
9850 3);
9851
9852 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9853 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9854 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9855 available_lens_shading_map_modes,
9856 2);
9857
9858 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9859 ANDROID_EDGE_MODE_FAST,
9860 ANDROID_EDGE_MODE_HIGH_QUALITY,
9861 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9862 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9863 available_edge_modes,
9864 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9865
9866 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9867 ANDROID_NOISE_REDUCTION_MODE_FAST,
9868 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9869 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9870 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9871 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9872 available_noise_red_modes,
9873 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9874
9875 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9876 ANDROID_TONEMAP_MODE_FAST,
9877 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9878 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9879 available_tonemap_modes,
9880 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9881
9882 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9883 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9884 available_hot_pixel_map_modes,
9885 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9886
9887 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9888 gCamCapability[cameraId]->reference_illuminant1);
9889 if (NAME_NOT_FOUND != val) {
9890 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9891 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9892 }
9893
9894 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9895 gCamCapability[cameraId]->reference_illuminant2);
9896 if (NAME_NOT_FOUND != val) {
9897 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9898 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9899 }
9900
9901 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9902 (void *)gCamCapability[cameraId]->forward_matrix1,
9903 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9904
9905 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9906 (void *)gCamCapability[cameraId]->forward_matrix2,
9907 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9908
9909 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9910 (void *)gCamCapability[cameraId]->color_transform1,
9911 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9912
9913 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9914 (void *)gCamCapability[cameraId]->color_transform2,
9915 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9916
9917 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9918 (void *)gCamCapability[cameraId]->calibration_transform1,
9919 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9920
9921 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9922 (void *)gCamCapability[cameraId]->calibration_transform2,
9923 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9924
9925 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9926 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9927 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9928 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9929 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9930 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9931 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9932 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9933 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9934 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9935 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9936 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9937 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9938 ANDROID_JPEG_GPS_COORDINATES,
9939 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9940 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9941 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9942 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9943 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9944 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9945 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9946 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9947 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9948 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
9949#ifndef USE_HAL_3_3
9950 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9951#endif
9952 ANDROID_STATISTICS_FACE_DETECT_MODE,
9953 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9954 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9955 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
9956 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
9957 /* DevCamDebug metadata request_keys_basic */
9958 DEVCAMDEBUG_META_ENABLE,
9959 /* DevCamDebug metadata end */
9960 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9961 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9962 TANGO_MODE_DATA_SENSOR_FULLFOV,
9963 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
9964 };
9965
9966 size_t request_keys_cnt =
9967 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9968 Vector<int32_t> available_request_keys;
9969 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9970 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9971 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9972 }
9973
9974 if (gExposeEnableZslKey) {
9975 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
9976 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
9977 }
9978 }
9979
9980 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9981 available_request_keys.array(), available_request_keys.size());
9982
9983 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9984 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9985 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9986 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9987 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9988 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9989 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9990 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9991 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9992 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9993 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9994 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9995 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9996 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9997 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9998 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9999 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
10000 ANDROID_STATISTICS_FACE_DETECT_MODE,
10001 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10002 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10003 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
10004 ANDROID_STATISTICS_FACE_SCORES,
10005#ifndef USE_HAL_3_3
10006 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10007#endif
10008 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
10009 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
10010 // DevCamDebug metadata result_keys_basic
10011 DEVCAMDEBUG_META_ENABLE,
10012 // DevCamDebug metadata result_keys AF
10013 DEVCAMDEBUG_AF_LENS_POSITION,
10014 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10015 DEVCAMDEBUG_AF_TOF_DISTANCE,
10016 DEVCAMDEBUG_AF_LUMA,
10017 DEVCAMDEBUG_AF_HAF_STATE,
10018 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10019 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10020 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10021 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10022 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10023 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10024 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10025 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10026 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10027 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10028 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10029 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10030 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10031 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10032 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10033 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10034 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10035 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10036 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10037 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10038 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10039 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10040 // DevCamDebug metadata result_keys AEC
10041 DEVCAMDEBUG_AEC_TARGET_LUMA,
10042 DEVCAMDEBUG_AEC_COMP_LUMA,
10043 DEVCAMDEBUG_AEC_AVG_LUMA,
10044 DEVCAMDEBUG_AEC_CUR_LUMA,
10045 DEVCAMDEBUG_AEC_LINECOUNT,
10046 DEVCAMDEBUG_AEC_REAL_GAIN,
10047 DEVCAMDEBUG_AEC_EXP_INDEX,
10048 DEVCAMDEBUG_AEC_LUX_IDX,
10049 // DevCamDebug metadata result_keys zzHDR
10050 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10051 DEVCAMDEBUG_AEC_L_LINECOUNT,
10052 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10053 DEVCAMDEBUG_AEC_S_LINECOUNT,
10054 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10055 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10056 // DevCamDebug metadata result_keys ADRC
10057 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10058 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10059 DEVCAMDEBUG_AEC_GTM_RATIO,
10060 DEVCAMDEBUG_AEC_LTM_RATIO,
10061 DEVCAMDEBUG_AEC_LA_RATIO,
10062 DEVCAMDEBUG_AEC_GAMMA_RATIO,
10063 // DevCamDebug metadata result_keys AWB
10064 DEVCAMDEBUG_AWB_R_GAIN,
10065 DEVCAMDEBUG_AWB_G_GAIN,
10066 DEVCAMDEBUG_AWB_B_GAIN,
10067 DEVCAMDEBUG_AWB_CCT,
10068 DEVCAMDEBUG_AWB_DECISION,
10069 /* DevCamDebug metadata end */
10070 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10071 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10072 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
10073 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
10074 };
10075
10076 size_t result_keys_cnt =
10077 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10078
10079 Vector<int32_t> available_result_keys;
10080 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10081 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10082 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10083 }
10084 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10085 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10086 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10087 }
10088 if (supportedFaceDetectMode == 1) {
10089 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10090 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10091 } else if ((supportedFaceDetectMode == 2) ||
10092 (supportedFaceDetectMode == 3)) {
10093 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10094 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10095 }
10096#ifndef USE_HAL_3_3
10097 if (hasBlackRegions) {
10098 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10099 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10100 }
10101#endif
10102
10103 if (gExposeEnableZslKey) {
10104 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10105 }
10106
10107 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10108 available_result_keys.array(), available_result_keys.size());
10109
10110 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10111 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10112 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10113 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10114 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10115 ANDROID_SCALER_CROPPING_TYPE,
10116 ANDROID_SYNC_MAX_LATENCY,
10117 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10118 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10119 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10120 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10121 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10122 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10123 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10124 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10125 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10126 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10127 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10128 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10129 ANDROID_LENS_FACING,
10130 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10131 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10132 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10133 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10134 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10135 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10136 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10137 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10138 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10139 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10140 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10141 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10142 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10143 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10144 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10145 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10146 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10147 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10148 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10149 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
10150 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
10151 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10152 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10153 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10154 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10155 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10156 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10157 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10158 ANDROID_CONTROL_AVAILABLE_MODES,
10159 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10160 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10161 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10162 ANDROID_SHADING_AVAILABLE_MODES,
10163 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10164#ifndef USE_HAL_3_3
10165 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10166 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10167#endif
10168 };
10169
10170 Vector<int32_t> available_characteristics_keys;
10171 available_characteristics_keys.appendArray(characteristics_keys_basic,
10172 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10173#ifndef USE_HAL_3_3
10174 if (hasBlackRegions) {
10175 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10176 }
10177#endif
10178
10179 if (0 <= indexPD) {
10180 int32_t depthKeys[] = {
10181 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10182 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10183 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10184 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10185 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10186 };
10187 available_characteristics_keys.appendArray(depthKeys,
10188 sizeof(depthKeys) / sizeof(depthKeys[0]));
10189 }
10190
10191 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
10192 available_characteristics_keys.array(),
10193 available_characteristics_keys.size());
10194
10195 /*available stall durations depend on the hw + sw and will be different for different devices */
10196 /*have to add for raw after implementation*/
10197 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10198 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10199
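/* Each stall duration entry is (format, width, height, stall_ns): BLOB uses the per-size
   JPEG stall durations and RAW16 uses the RAW16 stall durations reported by the capability. */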
10200 Vector<int64_t> available_stall_durations;
10201 for (uint32_t j = 0; j < stall_formats_count; j++) {
10202 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10203 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10204 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10205 available_stall_durations.add(stall_formats[j]);
10206 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10207 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10208 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10209 }
10210 } else {
10211 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10212 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10213 available_stall_durations.add(stall_formats[j]);
10214 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10215 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10216 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10217 }
10218 }
10219 }
10220 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10221 available_stall_durations.array(),
10222 available_stall_durations.size());
10223
10224 //QCAMERA3_OPAQUE_RAW
10225 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10226 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10227 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
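/* Select legacy (QCOM) vs MIPI packing from the opaque RAW format and derive the bit depth
   (8/10/12 bpp) from the sensor white level. */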
10228 case LEGACY_RAW:
10229 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10230 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10231 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10232 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10233 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10234 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10235 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10236 break;
10237 case MIPI_RAW:
10238 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10239 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10240 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10241 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10242 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10243 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10244 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10245 break;
10246 default:
10247 LOGE("unknown opaque_raw_format %d",
10248 gCamCapability[cameraId]->opaque_raw_fmt);
10249 break;
10250 }
10251 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10252
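/* QCAMERA3_OPAQUE_RAW_STRIDES entries are (width, height, stride) for every supported RAW
   dimension, using the plane info computed by mm_stream_calc_offset_raw(). */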
10253 Vector<int32_t> strides;
10254 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10255 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10256 cam_stream_buf_plane_info_t buf_planes;
10257 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10258 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10259 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10260 &gCamCapability[cameraId]->padding_info, &buf_planes);
10261 strides.add(buf_planes.plane_info.mp[0].stride);
10262 }
10263 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10264 strides.size());
10265
10266 //TBD: remove the following line once backend advertises zzHDR in feature mask
10267 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
10268 //Video HDR default
10269 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10270 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
10271 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
10272 int32_t vhdr_mode[] = {
10273 QCAMERA3_VIDEO_HDR_MODE_OFF,
10274 QCAMERA3_VIDEO_HDR_MODE_ON};
10275
10276 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10277 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10278 vhdr_mode, vhdr_mode_count);
10279 }
10280
10281 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10282 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10283 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10284
10285 uint8_t isMonoOnly =
10286 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10287 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10288 &isMonoOnly, 1);
10289
10290#ifndef USE_HAL_3_3
10291 Vector<int32_t> opaque_size;
10292 for (size_t j = 0; j < scalar_formats_count; j++) {
10293 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10294 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10295 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10296 cam_stream_buf_plane_info_t buf_planes;
10297
10298 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10299 &gCamCapability[cameraId]->padding_info, &buf_planes);
10300
10301 if (rc == 0) {
10302 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10303 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10304 opaque_size.add(buf_planes.plane_info.frame_len);
10305 }else {
10306 LOGE("raw frame calculation failed!");
10307 }
10308 }
10309 }
10310 }
10311
10312 if ((opaque_size.size() > 0) &&
10313 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10314 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10315 else
10316 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10317#endif
10318
10319 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10320 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10321 size = 0;
10322 count = CAM_IR_MODE_MAX;
10323 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10324 for (size_t i = 0; i < count; i++) {
10325 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10326 gCamCapability[cameraId]->supported_ir_modes[i]);
10327 if (NAME_NOT_FOUND != val) {
10328 avail_ir_modes[size] = (int32_t)val;
10329 size++;
10330 }
10331 }
10332 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10333 avail_ir_modes, size);
10334 }
10335
10336 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10337 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10338 size = 0;
10339 count = CAM_AEC_CONVERGENCE_MAX;
10340 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10341 for (size_t i = 0; i < count; i++) {
10342 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10343 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10344 if (NAME_NOT_FOUND != val) {
10345 available_instant_aec_modes[size] = (int32_t)val;
10346 size++;
10347 }
10348 }
10349 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10350 available_instant_aec_modes, size);
10351 }
10352
10353 int32_t sharpness_range[] = {
10354 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10355 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10356 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10357
10358 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10359 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10360 size = 0;
10361 count = CAM_BINNING_CORRECTION_MODE_MAX;
10362 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10363 for (size_t i = 0; i < count; i++) {
10364 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10365 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10366 gCamCapability[cameraId]->supported_binning_modes[i]);
10367 if (NAME_NOT_FOUND != val) {
10368 avail_binning_modes[size] = (int32_t)val;
10369 size++;
10370 }
10371 }
10372 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10373 avail_binning_modes, size);
10374 }
10375
10376 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10377 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10378 size = 0;
10379 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10380 for (size_t i = 0; i < count; i++) {
10381 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10382 gCamCapability[cameraId]->supported_aec_modes[i]);
10383 if (NAME_NOT_FOUND != val)
10384 available_aec_modes[size++] = val;
10385 }
10386 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10387 available_aec_modes, size);
10388 }
10389
10390 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10391 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10392 size = 0;
10393 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10394 for (size_t i = 0; i < count; i++) {
10395 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10396 gCamCapability[cameraId]->supported_iso_modes[i]);
10397 if (NAME_NOT_FOUND != val)
10398 available_iso_modes[size++] = val;
10399 }
10400 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10401 available_iso_modes, size);
10402 }
10403
10404 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10405 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10406 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10407 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10408 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10409
10410 int32_t available_saturation_range[4];
10411 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10412 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10413 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10414 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10415 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10416 available_saturation_range, 4);
10417
10418 uint8_t is_hdr_values[2];
10419 is_hdr_values[0] = 0;
10420 is_hdr_values[1] = 1;
10421 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10422 is_hdr_values, 2);
10423
10424 float is_hdr_confidence_range[2];
10425 is_hdr_confidence_range[0] = 0.0;
10426 is_hdr_confidence_range[1] = 1.0;
10427 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10428 is_hdr_confidence_range, 2);
10429
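/* Publish the EEPROM version string, appending whether an Easel device is present
   (",E:Y" / ",E:N") when there is room left in the buffer. */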
10430 size_t eepromLength = strnlen(
10431 reinterpret_cast<const char *>(
10432 gCamCapability[cameraId]->eeprom_version_info),
10433 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10434 if (0 < eepromLength) {
10435 char easelInfo[] = ",E:N";
10436 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10437 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10438 eepromLength += sizeof(easelInfo);
10439 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10440 MAX_EEPROM_VERSION_INFO_LEN);
10441 }
10442 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10443 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10444 }
10445
10446 gStaticMetadata[cameraId] = staticInfo.release();
10447 return rc;
10448}
10449
10450/*===========================================================================
10451 * FUNCTION : makeTable
10452 *
10453 * DESCRIPTION: make a table of sizes
10454 *
10455 * PARAMETERS :
10456 *   @dimTable / @size : source dimension table and its valid entry count
10457 *   @max_size / @sizeTable : output capacity (in dimension pairs) and flattened width/height array
10458 *==========================================================================*/
10459void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10460 size_t max_size, int32_t *sizeTable)
10461{
10462 size_t j = 0;
10463 if (size > max_size) {
10464 size = max_size;
10465 }
10466 for (size_t i = 0; i < size; i++) {
10467 sizeTable[j] = dimTable[i].width;
10468 sizeTable[j+1] = dimTable[i].height;
10469 j+=2;
10470 }
10471}
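// Illustrative use of makeTable() (hypothetical values, not taken from the
// capability tables): a dimension table is flattened into consecutive
// width/height pairs, which is the layout the static metadata arrays expect.
//   cam_dimension_t dims[2] = {{4032, 3024}, {1920, 1080}};
//   int32_t flat[4];
//   makeTable(dims, 2, 2, flat);   // flat = {4032, 3024, 1920, 1080}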
10472
10473/*===========================================================================
10474 * FUNCTION : makeFPSTable
10475 *
10476 * DESCRIPTION: make a table of fps ranges
10477 *
10478 * PARAMETERS :
10479 *
10480 *==========================================================================*/
10481void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10482 size_t max_size, int32_t *fpsRangesTable)
10483{
10484 size_t j = 0;
10485 if (size > max_size) {
10486 size = max_size;
10487 }
10488 for (size_t i = 0; i < size; i++) {
10489 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10490 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10491 j+=2;
10492 }
10493}
10494
10495/*===========================================================================
10496 * FUNCTION : makeOverridesList
10497 *
10498 * DESCRIPTION: make a list of scene mode overrides
10499 *
10500 * PARAMETERS :
10501 *
10502 *
10503 *==========================================================================*/
10504void QCamera3HardwareInterface::makeOverridesList(
10505 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10506 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10507{
10508 /*daemon will give a list of overrides for all scene modes.
10509 However we should send the fwk only the overrides for the scene modes
10510 supported by the framework*/
10511 size_t j = 0;
10512 if (size > max_size) {
10513 size = max_size;
10514 }
10515 size_t focus_count = CAM_FOCUS_MODE_MAX;
10516 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10517 focus_count);
10518 for (size_t i = 0; i < size; i++) {
10519 bool supt = false;
10520 size_t index = supported_indexes[i];
10521 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10522 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10523 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10524 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10525 overridesTable[index].awb_mode);
10526 if (NAME_NOT_FOUND != val) {
10527 overridesList[j+1] = (uint8_t)val;
10528 }
10529 uint8_t focus_override = overridesTable[index].af_mode;
10530 for (size_t k = 0; k < focus_count; k++) {
10531 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10532 supt = true;
10533 break;
10534 }
10535 }
10536 if (supt) {
10537 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10538 focus_override);
10539 if (NAME_NOT_FOUND != val) {
10540 overridesList[j+2] = (uint8_t)val;
10541 }
10542 } else {
10543 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10544 }
10545 j+=3;
10546 }
10547}
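// Layout note for the list built above (a sketch of the consumer, assumed from
// the static metadata usage rather than spelled out here): overridesList holds
// one (AE, AWB, AF) triple per framework-supported scene mode,
//   overridesList = { ae[0], awb[0], af[0], ae[1], awb[1], af[1], ... }
// matching the 3-entries-per-scene-mode layout of
// ANDROID_CONTROL_SCENE_MODE_OVERRIDES.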
10548
10549/*===========================================================================
10550 * FUNCTION : filterJpegSizes
10551 *
10552 * DESCRIPTION: Returns the supported jpeg sizes, i.e. the processed sizes that
10553 *              are no smaller than the active array size divided by downscale_factor
10554 *
10555 * PARAMETERS :
10556 *
10557 * RETURN : length of jpegSizes array
10558 *==========================================================================*/
10559
10560size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10561 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10562 uint8_t downscale_factor)
10563{
10564 if (0 == downscale_factor) {
10565 downscale_factor = 1;
10566 }
10567
10568 int32_t min_width = active_array_size.width / downscale_factor;
10569 int32_t min_height = active_array_size.height / downscale_factor;
10570 size_t jpegSizesCnt = 0;
10571 if (processedSizesCnt > maxCount) {
10572 processedSizesCnt = maxCount;
10573 }
10574 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10575 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10576 jpegSizes[jpegSizesCnt] = processedSizes[i];
10577 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10578 jpegSizesCnt += 2;
10579 }
10580 }
10581 return jpegSizesCnt;
10582}
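// Worked example for filterJpegSizes() with hypothetical numbers: for an
// active array of 4032x3024 and downscale_factor = 2, min_width/min_height are
// 2016/1512, so a processed size list of {4032, 3024, 3840, 2160, 1920, 1080}
// is filtered to {4032, 3024, 3840, 2160} (1920x1080 is dropped since
// 1920 < 2016) and the function returns 4.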
10583
10584/*===========================================================================
10585 * FUNCTION : computeNoiseModelEntryS
10586 *
10587 * DESCRIPTION: function to map a given sensitivity to the S noise
10588 * model parameters in the DNG noise model.
10589 *
10590 * PARAMETERS : sens : the sensor sensitivity
10591 *
10592 * RETURN     : S (sensor amplification) noise
10593 *
10594 *==========================================================================*/
10595double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10596 double s = gCamCapability[mCameraId]->gradient_S * sens +
10597 gCamCapability[mCameraId]->offset_S;
10598 return ((s < 0.0) ? 0.0 : s);
10599}
10600
10601/*===========================================================================
10602 * FUNCTION : computeNoiseModelEntryO
10603 *
10604 * DESCRIPTION: function to map a given sensitivity to the O noise
10605 * model parameters in the DNG noise model.
10606 *
10607 * PARAMETERS : sens : the sensor sensitivity
10608 *
10609 * RETURN     : O (sensor readout) noise
10610 *
10611 *==========================================================================*/
10612double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10613 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10614 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10615 1.0 : (1.0 * sens / max_analog_sens);
10616 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10617 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10618 return ((o < 0.0) ? 0.0 : o);
10619}
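// Sketch of how S and O are consumed (an assumption based on the
// DNG/android.sensor.noiseProfile model, not on code in this file): for a
// pixel value x normalized to [0, 1], noise is modeled as N(x) = sqrt(S*x + O),
// so a per-sensitivity noise profile entry could be filled as:
//   double entry[2];
//   entry[0] = computeNoiseModelEntryS(sensitivity);   // S (signal-dependent)
//   entry[1] = computeNoiseModelEntryO(sensitivity);   // O (signal-independent)
// where 'sensitivity' is the requested ISO value (hypothetical variable).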
10620
10621/*===========================================================================
10622 * FUNCTION : getSensorSensitivity
10623 *
10624 * DESCRIPTION: convert iso_mode to an integer value
10625 *
10626 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10627 *
10628 * RETURN     : sensitivity supported by sensor
10629 *
10630 *==========================================================================*/
10631int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10632{
10633 int32_t sensitivity;
10634
10635 switch (iso_mode) {
10636 case CAM_ISO_MODE_100:
10637 sensitivity = 100;
10638 break;
10639 case CAM_ISO_MODE_200:
10640 sensitivity = 200;
10641 break;
10642 case CAM_ISO_MODE_400:
10643 sensitivity = 400;
10644 break;
10645 case CAM_ISO_MODE_800:
10646 sensitivity = 800;
10647 break;
10648 case CAM_ISO_MODE_1600:
10649 sensitivity = 1600;
10650 break;
10651 default:
10652 sensitivity = -1;
10653 break;
10654 }
10655 return sensitivity;
10656}
10657
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010658int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010659 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010660 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10661 // to connect to Easel.
10662 bool doNotpowerOnEasel =
10663 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10664
10665 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010666 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10667 return OK;
10668 }
10669
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010670 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010671 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010672 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010673 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010674 return res;
10675 }
10676
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010677 EaselManagerClientOpened = true;
10678
10679 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010680 if (res != OK) {
10681 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10682 }
10683
Chien-Yu Chene0008342017-04-26 12:41:45 -070010684 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010685 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010686
10687 // Expose enableZsl key only when HDR+ mode is enabled.
10688 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010689 }
10690
10691 return OK;
10692}
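// For reference, the behavior above is controlled entirely by the system
// properties read in this function; a hypothetical way to toggle them during
// development would be:
//   adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel powered off
//   adb shell setprop persist.camera.hdrplus.enable 0      # HDR+ bypass-only mode
//   adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling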
10693
Thierry Strudel3d639192016-09-09 11:52:26 -070010694/*===========================================================================
10695 * FUNCTION : getCamInfo
10696 *
10697 * DESCRIPTION: query camera capabilities
10698 *
10699 * PARAMETERS :
10700 * @cameraId : camera Id
10701 * @info : camera info struct to be filled in with camera capabilities
10702 *
10703 * RETURN : int type of status
10704 * NO_ERROR -- success
10705 * none-zero failure code
10706 *              non-zero failure code
10707int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10708 struct camera_info *info)
10709{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010710 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010711 int rc = 0;
10712
10713 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010714
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010715 {
10716 Mutex::Autolock l(gHdrPlusClientLock);
10717 rc = initHdrPlusClientLocked();
10718 if (rc != OK) {
10719 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10720 pthread_mutex_unlock(&gCamLock);
10721 return rc;
10722 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010723 }
10724
Thierry Strudel3d639192016-09-09 11:52:26 -070010725 if (NULL == gCamCapability[cameraId]) {
10726 rc = initCapabilities(cameraId);
10727 if (rc < 0) {
10728 pthread_mutex_unlock(&gCamLock);
10729 return rc;
10730 }
10731 }
10732
10733 if (NULL == gStaticMetadata[cameraId]) {
10734 rc = initStaticMetadata(cameraId);
10735 if (rc < 0) {
10736 pthread_mutex_unlock(&gCamLock);
10737 return rc;
10738 }
10739 }
10740
10741 switch(gCamCapability[cameraId]->position) {
10742 case CAM_POSITION_BACK:
10743 case CAM_POSITION_BACK_AUX:
10744 info->facing = CAMERA_FACING_BACK;
10745 break;
10746
10747 case CAM_POSITION_FRONT:
10748 case CAM_POSITION_FRONT_AUX:
10749 info->facing = CAMERA_FACING_FRONT;
10750 break;
10751
10752 default:
10753 LOGE("Unknown position type %d for camera id:%d",
10754 gCamCapability[cameraId]->position, cameraId);
10755 rc = -1;
10756 break;
10757 }
10758
10759
10760 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010761#ifndef USE_HAL_3_3
10762 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10763#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010764 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010765#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010766 info->static_camera_characteristics = gStaticMetadata[cameraId];
10767
10768 //For now assume both cameras can operate independently.
10769 info->conflicting_devices = NULL;
10770 info->conflicting_devices_length = 0;
10771
10772 //resource cost is 100 * MIN(1.0, m/M),
10773 //where m is throughput requirement with maximum stream configuration
10774 //and M is CPP maximum throughput.
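    // Worked example with hypothetical numbers: assuming MAX_PROCESSED_STREAMS
    // is 3, a 4032x3024 active array and max_fps = 30, the required throughput
    // m is 3 * 4032 * 3024 * 30 ~= 1.1 Gpix/s; with a CPP limit M of, say,
    // 1.4 Gpix/s, the reported resource_cost is 100 * (1.1 / 1.4) ~= 78.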
10775 float max_fps = 0.0;
10776 for (uint32_t i = 0;
10777 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10778 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10779 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10780 }
10781 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10782 gCamCapability[cameraId]->active_array_size.width *
10783 gCamCapability[cameraId]->active_array_size.height * max_fps /
10784 gCamCapability[cameraId]->max_pixel_bandwidth;
10785 info->resource_cost = 100 * MIN(1.0, ratio);
10786 LOGI("camera %d resource cost is %d", cameraId,
10787 info->resource_cost);
10788
10789 pthread_mutex_unlock(&gCamLock);
10790 return rc;
10791}
10792
10793/*===========================================================================
10794 * FUNCTION : translateCapabilityToMetadata
10795 *
10796 * DESCRIPTION: translate the capability into camera_metadata_t
10797 *
10798 * PARAMETERS : type of the request
10799 *
10800 *
10801 * RETURN : success: camera_metadata_t*
10802 * failure: NULL
10803 *
10804 *==========================================================================*/
10805camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10806{
10807 if (mDefaultMetadata[type] != NULL) {
10808 return mDefaultMetadata[type];
10809 }
10810 //first time we are handling this request
10811 //fill up the metadata structure using the wrapper class
10812 CameraMetadata settings;
10813 //translate from cam_capability_t to camera_metadata_tag_t
10814 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10815 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10816 int32_t defaultRequestID = 0;
10817 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10818
10819 /* OIS disable */
10820 char ois_prop[PROPERTY_VALUE_MAX];
10821 memset(ois_prop, 0, sizeof(ois_prop));
10822 property_get("persist.camera.ois.disable", ois_prop, "0");
10823 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10824
10825 /* Force video to use OIS */
10826 char videoOisProp[PROPERTY_VALUE_MAX];
10827 memset(videoOisProp, 0, sizeof(videoOisProp));
10828 property_get("persist.camera.ois.video", videoOisProp, "1");
10829 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010830
10831 // Hybrid AE enable/disable
10832 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10833 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10834 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10835 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10836
Thierry Strudel3d639192016-09-09 11:52:26 -070010837 uint8_t controlIntent = 0;
10838 uint8_t focusMode;
10839 uint8_t vsMode;
10840 uint8_t optStabMode;
10841 uint8_t cacMode;
10842 uint8_t edge_mode;
10843 uint8_t noise_red_mode;
10844 uint8_t tonemap_mode;
10845 bool highQualityModeEntryAvailable = FALSE;
10846 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010847 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010848 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10849 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010850 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010851 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010852 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010853
Thierry Strudel3d639192016-09-09 11:52:26 -070010854 switch (type) {
10855 case CAMERA3_TEMPLATE_PREVIEW:
10856 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10857 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10858 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10859 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10860 edge_mode = ANDROID_EDGE_MODE_FAST;
10861 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10862 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10863 break;
10864 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10865 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10866 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10867 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10868 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10869 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10870 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10871 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10872 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10873 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10874 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10875 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10876 highQualityModeEntryAvailable = TRUE;
10877 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10878 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10879 fastModeEntryAvailable = TRUE;
10880 }
10881 }
10882 if (highQualityModeEntryAvailable) {
10883 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10884 } else if (fastModeEntryAvailable) {
10885 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10886 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010887 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10888 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10889 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010890 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010891 break;
10892 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10893 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10894 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10895 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010896 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10897 edge_mode = ANDROID_EDGE_MODE_FAST;
10898 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10899 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10900 if (forceVideoOis)
10901 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10902 break;
10903 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10904 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10905 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10906 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010907 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10908 edge_mode = ANDROID_EDGE_MODE_FAST;
10909 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10910 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10911 if (forceVideoOis)
10912 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10913 break;
10914 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10915 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10916 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10917 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10918 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10919 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10920 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10921 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10922 break;
10923 case CAMERA3_TEMPLATE_MANUAL:
10924 edge_mode = ANDROID_EDGE_MODE_FAST;
10925 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10926 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10927 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10928 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10929 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10930 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10931 break;
10932 default:
10933 edge_mode = ANDROID_EDGE_MODE_FAST;
10934 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10935 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10936 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10937 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10938 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10939 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10940 break;
10941 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010942 // Set CAC to OFF if underlying device doesn't support
10943 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10944 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10945 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010946 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10947 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10948 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10949 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10950 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10951 }
10952 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010953 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010954 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010955
10956 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10957 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10958 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10959 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10960 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10961 || ois_disable)
10962 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10963 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010964 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010965
10966 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10967 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10968
10969 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10970 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10971
10972 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10973 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10974
10975 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10976 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10977
10978 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10979 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10980
10981 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10982 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10983
10984 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10985 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10986
10987 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10988 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10989
10990 /*flash*/
10991 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10992 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10993
10994 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10995 settings.update(ANDROID_FLASH_FIRING_POWER,
10996 &flashFiringLevel, 1);
10997
10998 /* lens */
10999 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11000 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11001
11002 if (gCamCapability[mCameraId]->filter_densities_count) {
11003 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11004 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11005 gCamCapability[mCameraId]->filter_densities_count);
11006 }
11007
11008 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11009 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11010
Thierry Strudel3d639192016-09-09 11:52:26 -070011011 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11012 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11013
11014 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11015 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11016
11017 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11018 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11019
11020 /* face detection (default to OFF) */
11021 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11022 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11023
Thierry Strudel54dc9782017-02-15 12:12:10 -080011024 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11025 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011026
11027 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11028 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11029
11030 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11031 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11032
Thierry Strudel3d639192016-09-09 11:52:26 -070011033
11034 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11035 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11036
11037 /* Exposure time(Update the Min Exposure Time)*/
11038 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11039 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11040
11041 /* frame duration */
11042 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11043 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11044
11045 /* sensitivity */
11046 static const int32_t default_sensitivity = 100;
11047 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011048#ifndef USE_HAL_3_3
11049 static const int32_t default_isp_sensitivity =
11050 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11051 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11052#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011053
11054 /*edge mode*/
11055 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11056
11057 /*noise reduction mode*/
11058 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11059
11060 /*color correction mode*/
11061 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11062 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11063
11064    /*tonemap mode*/
11065 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11066
11067 int32_t scaler_crop_region[4];
11068 scaler_crop_region[0] = 0;
11069 scaler_crop_region[1] = 0;
11070 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11071 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11072 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11073
11074 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11075 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11076
11077 /*focus distance*/
11078 float focus_distance = 0.0;
11079 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11080
11081 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011082 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011083 float max_range = 0.0;
11084 float max_fixed_fps = 0.0;
11085 int32_t fps_range[2] = {0, 0};
11086 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11087 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011088 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11089 TEMPLATE_MAX_PREVIEW_FPS) {
11090 continue;
11091 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011092 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11093 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11094 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11095 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11096 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11097 if (range > max_range) {
11098 fps_range[0] =
11099 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11100 fps_range[1] =
11101 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11102 max_range = range;
11103 }
11104 } else {
11105 if (range < 0.01 && max_fixed_fps <
11106 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11107 fps_range[0] =
11108 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11109 fps_range[1] =
11110 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11111 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11112 }
11113 }
11114 }
11115 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11116
11117 /*precapture trigger*/
11118 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11119 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11120
11121 /*af trigger*/
11122 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11123 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11124
11125 /* ae & af regions */
11126 int32_t active_region[] = {
11127 gCamCapability[mCameraId]->active_array_size.left,
11128 gCamCapability[mCameraId]->active_array_size.top,
11129 gCamCapability[mCameraId]->active_array_size.left +
11130 gCamCapability[mCameraId]->active_array_size.width,
11131 gCamCapability[mCameraId]->active_array_size.top +
11132 gCamCapability[mCameraId]->active_array_size.height,
11133 0};
11134 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11135 sizeof(active_region) / sizeof(active_region[0]));
11136 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11137 sizeof(active_region) / sizeof(active_region[0]));
11138
11139 /* black level lock */
11140 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11141 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11142
Thierry Strudel3d639192016-09-09 11:52:26 -070011143 //special defaults for manual template
11144 if (type == CAMERA3_TEMPLATE_MANUAL) {
11145 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11146 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11147
11148 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11149 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11150
11151 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11152 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11153
11154 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11155 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11156
11157 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11158 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11159
11160 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11161 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11162 }
11163
11164
11165 /* TNR
11166     * This is where we decide for which templates TNR is enabled by default.
11167     * TNR is enabled if either the preview or the video stream requires it.
11168     * This is not to be confused with per-stream linking; that decision is
11169     * still made per session and is handled as part of stream configuration.
11170 */
11171 uint8_t tnr_enable = 0;
11172
11173 if (m_bTnrPreview || m_bTnrVideo) {
11174
11175 switch (type) {
11176 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11177 tnr_enable = 1;
11178 break;
11179
11180 default:
11181 tnr_enable = 0;
11182 break;
11183 }
11184
11185 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11186 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11187 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11188
11189 LOGD("TNR:%d with process plate %d for template:%d",
11190 tnr_enable, tnr_process_type, type);
11191 }
11192
11193 //Update Link tags to default
11194 int32_t sync_type = CAM_TYPE_STANDALONE;
11195 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11196
11197 int32_t is_main = 0; //this doesn't matter as app should overwrite
11198 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11199
11200 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11201
11202 /* CDS default */
11203 char prop[PROPERTY_VALUE_MAX];
11204 memset(prop, 0, sizeof(prop));
11205 property_get("persist.camera.CDS", prop, "Auto");
11206 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11207 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11208 if (CAM_CDS_MODE_MAX == cds_mode) {
11209 cds_mode = CAM_CDS_MODE_AUTO;
11210 }
11211
11212 /* Disabling CDS in templates which have TNR enabled*/
11213 if (tnr_enable)
11214 cds_mode = CAM_CDS_MODE_OFF;
11215
11216 int32_t mode = cds_mode;
11217 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011218
Thierry Strudel269c81a2016-10-12 12:13:59 -070011219 /* Manual Convergence AEC Speed is disabled by default*/
11220 float default_aec_speed = 0;
11221 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11222
11223 /* Manual Convergence AWB Speed is disabled by default*/
11224 float default_awb_speed = 0;
11225 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11226
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011227 // Set instant AEC to normal convergence by default
11228 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11229 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11230
Shuzhen Wang19463d72016-03-08 11:09:52 -080011231 /* hybrid ae */
11232 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11233
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011234 if (gExposeEnableZslKey) {
11235 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11236 }
11237
Thierry Strudel3d639192016-09-09 11:52:26 -070011238 mDefaultMetadata[type] = settings.release();
11239
11240 return mDefaultMetadata[type];
11241}
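// Sketch of how these defaults are consumed (an assumption about the caller,
// not part of this file): the framework fetches a template through the HAL3
// entry point and then overrides individual tags before submitting a request:
//   const camera_metadata_t *defaults =
//           device->ops->construct_default_request_settings(
//                   device, CAMERA3_TEMPLATE_PREVIEW);
//   CameraMetadata request(clone_camera_metadata(defaults));
//   uint8_t af = ANDROID_CONTROL_AF_MODE_AUTO;
//   request.update(ANDROID_CONTROL_AF_MODE, &af, 1);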
11242
11243/*===========================================================================
11244 * FUNCTION : setFrameParameters
11245 *
11246 * DESCRIPTION: set parameters per frame as requested in the metadata from
11247 * framework
11248 *
11249 * PARAMETERS :
11250 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011251 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011252 * @blob_request: Whether this request is a blob request or not
11253 *
11254 * RETURN : success: NO_ERROR
11255 * failure:
11256 *==========================================================================*/
11257int QCamera3HardwareInterface::setFrameParameters(
11258 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011259 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011260 int blob_request,
11261 uint32_t snapshotStreamId)
11262{
11263 /*translate from camera_metadata_t type to parm_type_t*/
11264 int rc = 0;
11265 int32_t hal_version = CAM_HAL_V3;
11266
11267 clear_metadata_buffer(mParameters);
11268 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11269 LOGE("Failed to set hal version in the parameters");
11270 return BAD_VALUE;
11271 }
11272
11273 /*we need to update the frame number in the parameters*/
11274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11275 request->frame_number)) {
11276 LOGE("Failed to set the frame number in the parameters");
11277 return BAD_VALUE;
11278 }
11279
11280 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011282 LOGE("Failed to set stream type mask in the parameters");
11283 return BAD_VALUE;
11284 }
11285
11286 if (mUpdateDebugLevel) {
11287 uint32_t dummyDebugLevel = 0;
11288         /* The value of dummyDebugLevel is irrelevant. On
11289          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11291 dummyDebugLevel)) {
11292 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11293 return BAD_VALUE;
11294 }
11295 mUpdateDebugLevel = false;
11296 }
11297
11298 if(request->settings != NULL){
11299 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11300 if (blob_request)
11301 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11302 }
11303
11304 return rc;
11305}
11306
11307/*===========================================================================
11308 * FUNCTION : setReprocParameters
11309 *
11310 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11311 * return it.
11312 *
11313 * PARAMETERS :
11314 * @request : request that needs to be serviced
11315 *
11316 * RETURN : success: NO_ERROR
11317 * failure:
11318 *==========================================================================*/
11319int32_t QCamera3HardwareInterface::setReprocParameters(
11320 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11321 uint32_t snapshotStreamId)
11322{
11323 /*translate from camera_metadata_t type to parm_type_t*/
11324 int rc = 0;
11325
11326 if (NULL == request->settings){
11327 LOGE("Reprocess settings cannot be NULL");
11328 return BAD_VALUE;
11329 }
11330
11331 if (NULL == reprocParam) {
11332 LOGE("Invalid reprocessing metadata buffer");
11333 return BAD_VALUE;
11334 }
11335 clear_metadata_buffer(reprocParam);
11336
11337 /*we need to update the frame number in the parameters*/
11338 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11339 request->frame_number)) {
11340 LOGE("Failed to set the frame number in the parameters");
11341 return BAD_VALUE;
11342 }
11343
11344 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11345 if (rc < 0) {
11346 LOGE("Failed to translate reproc request");
11347 return rc;
11348 }
11349
11350 CameraMetadata frame_settings;
11351 frame_settings = request->settings;
11352 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11353 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11354 int32_t *crop_count =
11355 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11356 int32_t *crop_data =
11357 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11358 int32_t *roi_map =
11359 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11360 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11361 cam_crop_data_t crop_meta;
11362 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11363 crop_meta.num_of_streams = 1;
11364 crop_meta.crop_info[0].crop.left = crop_data[0];
11365 crop_meta.crop_info[0].crop.top = crop_data[1];
11366 crop_meta.crop_info[0].crop.width = crop_data[2];
11367 crop_meta.crop_info[0].crop.height = crop_data[3];
11368
11369 crop_meta.crop_info[0].roi_map.left =
11370 roi_map[0];
11371 crop_meta.crop_info[0].roi_map.top =
11372 roi_map[1];
11373 crop_meta.crop_info[0].roi_map.width =
11374 roi_map[2];
11375 crop_meta.crop_info[0].roi_map.height =
11376 roi_map[3];
11377
11378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11379 rc = BAD_VALUE;
11380 }
11381 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11382 request->input_buffer->stream,
11383 crop_meta.crop_info[0].crop.left,
11384 crop_meta.crop_info[0].crop.top,
11385 crop_meta.crop_info[0].crop.width,
11386 crop_meta.crop_info[0].crop.height);
11387 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11388 request->input_buffer->stream,
11389 crop_meta.crop_info[0].roi_map.left,
11390 crop_meta.crop_info[0].roi_map.top,
11391 crop_meta.crop_info[0].roi_map.width,
11392 crop_meta.crop_info[0].roi_map.height);
11393 } else {
11394 LOGE("Invalid reprocess crop count %d!", *crop_count);
11395 }
11396 } else {
11397 LOGE("No crop data from matching output stream");
11398 }
11399
11400 /* These settings are not needed for regular requests so handle them specially for
11401 reprocess requests; information needed for EXIF tags */
11402 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11403 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11404 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11405 if (NAME_NOT_FOUND != val) {
11406 uint32_t flashMode = (uint32_t)val;
11407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11408 rc = BAD_VALUE;
11409 }
11410 } else {
11411 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11412 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11413 }
11414 } else {
11415 LOGH("No flash mode in reprocess settings");
11416 }
11417
11418 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11419 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11421 rc = BAD_VALUE;
11422 }
11423 } else {
11424 LOGH("No flash state in reprocess settings");
11425 }
11426
11427 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11428 uint8_t *reprocessFlags =
11429 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11430 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11431 *reprocessFlags)) {
11432 rc = BAD_VALUE;
11433 }
11434 }
11435
Thierry Strudel54dc9782017-02-15 12:12:10 -080011436 // Add exif debug data to internal metadata
11437 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11438 mm_jpeg_debug_exif_params_t *debug_params =
11439 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11440 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11441 // AE
11442 if (debug_params->ae_debug_params_valid == TRUE) {
11443 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11444 debug_params->ae_debug_params);
11445 }
11446 // AWB
11447 if (debug_params->awb_debug_params_valid == TRUE) {
11448 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11449 debug_params->awb_debug_params);
11450 }
11451 // AF
11452 if (debug_params->af_debug_params_valid == TRUE) {
11453 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11454 debug_params->af_debug_params);
11455 }
11456 // ASD
11457 if (debug_params->asd_debug_params_valid == TRUE) {
11458 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11459 debug_params->asd_debug_params);
11460 }
11461 // Stats
11462 if (debug_params->stats_debug_params_valid == TRUE) {
11463 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11464 debug_params->stats_debug_params);
11465 }
11466 // BE Stats
11467 if (debug_params->bestats_debug_params_valid == TRUE) {
11468 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11469 debug_params->bestats_debug_params);
11470 }
11471 // BHIST
11472 if (debug_params->bhist_debug_params_valid == TRUE) {
11473 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11474 debug_params->bhist_debug_params);
11475 }
11476 // 3A Tuning
11477 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11478 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11479 debug_params->q3a_tuning_debug_params);
11480 }
11481 }
11482
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011483 // Add metadata which reprocess needs
11484 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11485 cam_reprocess_info_t *repro_info =
11486 (cam_reprocess_info_t *)frame_settings.find
11487 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011488 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011489 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011490 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011491 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011492 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011493 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011494 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011495 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011496 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011497 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011498 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011499 repro_info->pipeline_flip);
11500 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11501 repro_info->af_roi);
11502 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11503 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011504 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11505            CAM_INTF_PARM_ROTATION metadata has already been added in
11506            translateToHalMetadata. HAL needs to keep this new rotation
11507 metadata. Otherwise, the old rotation info saved in the vendor tag
11508 would be used */
11509 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11510 CAM_INTF_PARM_ROTATION, reprocParam) {
11511 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11512 } else {
11513 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011514 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011515 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011516 }
11517
11518     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11519        to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11520        roi.width and roi.height would be the final JPEG size.
11521        For now, HAL only checks this for reprocess requests */
11522 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11523 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11524 uint8_t *enable =
11525 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11526 if (*enable == TRUE) {
11527 int32_t *crop_data =
11528 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11529 cam_stream_crop_info_t crop_meta;
11530 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11531 crop_meta.stream_id = 0;
11532 crop_meta.crop.left = crop_data[0];
11533 crop_meta.crop.top = crop_data[1];
11534 crop_meta.crop.width = crop_data[2];
11535 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011536 // The JPEG crop roi should match cpp output size
11537 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11538 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11539 crop_meta.roi_map.left = 0;
11540 crop_meta.roi_map.top = 0;
11541 crop_meta.roi_map.width = cpp_crop->crop.width;
11542 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011543 }
11544 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11545 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011546 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011547 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011548 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11549 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011550 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011551 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11552
11553 // Add JPEG scale information
11554 cam_dimension_t scale_dim;
11555 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11556 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11557 int32_t *roi =
11558 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11559 scale_dim.width = roi[2];
11560 scale_dim.height = roi[3];
11561 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11562 scale_dim);
11563 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11564 scale_dim.width, scale_dim.height, mCameraId);
11565 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011566 }
11567 }
11568
11569 return rc;
11570}
11571
11572/*===========================================================================
11573 * FUNCTION : saveRequestSettings
11574 *
11575 * DESCRIPTION: Add any settings that might have changed to the request settings
11576 * and save the settings to be applied on the frame
11577 *
11578 * PARAMETERS :
11579 * @jpegMetadata : the extracted and/or modified jpeg metadata
11580 * @request : request with initial settings
11581 *
11582 * RETURN :
11583 * camera_metadata_t* : pointer to the saved request settings
11584 *==========================================================================*/
11585camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11586 const CameraMetadata &jpegMetadata,
11587 camera3_capture_request_t *request)
11588{
11589 camera_metadata_t *resultMetadata;
11590 CameraMetadata camMetadata;
11591 camMetadata = request->settings;
11592
11593 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11594 int32_t thumbnail_size[2];
11595 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11596 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11597 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11598 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11599 }
11600
11601 if (request->input_buffer != NULL) {
11602 uint8_t reprocessFlags = 1;
11603 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11604 (uint8_t*)&reprocessFlags,
11605 sizeof(reprocessFlags));
11606 }
11607
11608 resultMetadata = camMetadata.release();
11609 return resultMetadata;
11610}
11611
11612/*===========================================================================
11613 * FUNCTION : setHalFpsRange
11614 *
11615 * DESCRIPTION: set FPS range parameter
11616 *
11617 *
11618 * PARAMETERS :
11619 * @settings : Metadata from framework
11620 * @hal_metadata: Metadata buffer
11621 *
11622 *
11623 * RETURN : success: NO_ERROR
11624 * failure:
11625 *==========================================================================*/
11626int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11627 metadata_buffer_t *hal_metadata)
11628{
11629 int32_t rc = NO_ERROR;
11630 cam_fps_range_t fps_range;
11631 fps_range.min_fps = (float)
11632 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11633 fps_range.max_fps = (float)
11634 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11635 fps_range.video_min_fps = fps_range.min_fps;
11636 fps_range.video_max_fps = fps_range.max_fps;
11637
11638 LOGD("aeTargetFpsRange fps: [%f %f]",
11639 fps_range.min_fps, fps_range.max_fps);
11640 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11641 * follows:
11642 * ---------------------------------------------------------------|
11643 * Video stream is absent in configure_streams |
11644 * (Camcorder preview before the first video record |
11645 * ---------------------------------------------------------------|
11646 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11647 * | | | vid_min/max_fps|
11648 * ---------------------------------------------------------------|
11649 * NO | [ 30, 240] | 240 | [240, 240] |
11650 * |-------------|-------------|----------------|
11651 * | [240, 240] | 240 | [240, 240] |
11652 * ---------------------------------------------------------------|
11653 * Video stream is present in configure_streams |
11654 * ---------------------------------------------------------------|
11655 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11656 * | | | vid_min/max_fps|
11657 * ---------------------------------------------------------------|
11658 * NO | [ 30, 240] | 240 | [240, 240] |
11659 * (camcorder prev |-------------|-------------|----------------|
11660 * after video rec | [240, 240] | 240 | [240, 240] |
11661 * is stopped) | | | |
11662 * ---------------------------------------------------------------|
11663 * YES | [ 30, 240] | 240 | [240, 240] |
11664 * |-------------|-------------|----------------|
11665 * | [240, 240] | 240 | [240, 240] |
11666 * ---------------------------------------------------------------|
11667 * When Video stream is absent in configure_streams,
11668 * preview fps = sensor_fps / batchsize
11669 * Eg: for 240fps at batchSize 4, preview = 60fps
11670 * for 120fps at batchSize 4, preview = 30fps
11671 *
11672 * When video stream is present in configure_streams, preview fps is as per
11673 * the ratio of preview buffers to video buffers requested in process
11674 * capture request
11675 */
11676 mBatchSize = 0;
11677 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11678 fps_range.min_fps = fps_range.video_max_fps;
11679 fps_range.video_min_fps = fps_range.video_max_fps;
11680 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11681 fps_range.max_fps);
11682 if (NAME_NOT_FOUND != val) {
11683 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11684 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11685 return BAD_VALUE;
11686 }
11687
11688 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11689 /* If batchmode is currently in progress and the fps changes,
11690 * set the flag to restart the sensor */
11691 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11692 (mHFRVideoFps != fps_range.max_fps)) {
11693 mNeedSensorRestart = true;
11694 }
11695 mHFRVideoFps = fps_range.max_fps;
11696 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11697 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11698 mBatchSize = MAX_HFR_BATCH_SIZE;
11699 }
11700 }
11701 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11702
11703 }
11704 } else {
11705 /* HFR mode is session param in backend/ISP. This should be reset when
11706 * in non-HFR mode */
11707 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11708 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11709 return BAD_VALUE;
11710 }
11711 }
11712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11713 return BAD_VALUE;
11714 }
11715 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11716 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11717 return rc;
11718}
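// Worked example (values follow from the code above): in constrained
// high-speed mode with aeTargetFpsRange = [30, 240], min_fps is raised to
// video_max_fps so the sensor runs at a fixed [240, 240], hfrMode maps to the
// 240 fps entry of HFR_MODE_MAP, and mBatchSize becomes
// 240 / PREVIEW_FPS_FOR_HFR, capped at MAX_HFR_BATCH_SIZE.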
11719
11720/*===========================================================================
11721 * FUNCTION : translateToHalMetadata
11722 *
11723 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11724 *
11725 *
11726 * PARAMETERS :
11727 * @request : request sent from framework
11728 *
11729 *
11730 * RETURN : success: NO_ERROR
11731 * failure:
11732 *==========================================================================*/
11733int QCamera3HardwareInterface::translateToHalMetadata
11734 (const camera3_capture_request_t *request,
11735 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011736 uint32_t snapshotStreamId) {
11737 if (request == nullptr || hal_metadata == nullptr) {
11738 return BAD_VALUE;
11739 }
11740
11741 int64_t minFrameDuration = getMinFrameDuration(request);
11742
11743 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11744 minFrameDuration);
11745}
11746
11747int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11748 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11749 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11750
Thierry Strudel3d639192016-09-09 11:52:26 -070011751 int rc = 0;
11752 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011753 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011754
11755 /* Do not change the order of the following list unless you know what you are
11756 * doing.
11757 * The order is laid out in such a way that parameters in the front of the table
11758 * may be used to override the parameters later in the table. Examples are:
11759 * 1. META_MODE should precede AEC/AWB/AF MODE
11760 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11761 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11762 * 4. Any mode should precede its corresponding settings
11763 */
11764 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11765 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11767 rc = BAD_VALUE;
11768 }
11769 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11770 if (rc != NO_ERROR) {
11771 LOGE("extractSceneMode failed");
11772 }
11773 }
11774
11775 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11776 uint8_t fwk_aeMode =
11777 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11778 uint8_t aeMode;
11779 int32_t redeye;
11780
11781 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11782 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011783 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11784 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011785 } else {
11786 aeMode = CAM_AE_MODE_ON;
11787 }
11788 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11789 redeye = 1;
11790 } else {
11791 redeye = 0;
11792 }
11793
11794 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11795 fwk_aeMode);
11796 if (NAME_NOT_FOUND != val) {
11797 int32_t flashMode = (int32_t)val;
11798 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11799 }
11800
11801 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11803 rc = BAD_VALUE;
11804 }
11805 }
11806
11807 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11808 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11809 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11810 fwk_whiteLevel);
11811 if (NAME_NOT_FOUND != val) {
11812 uint8_t whiteLevel = (uint8_t)val;
11813 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11814 rc = BAD_VALUE;
11815 }
11816 }
11817 }
11818
11819 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11820 uint8_t fwk_cacMode =
11821 frame_settings.find(
11822 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11823 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11824 fwk_cacMode);
11825 if (NAME_NOT_FOUND != val) {
11826 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11827 bool entryAvailable = FALSE;
11828 // Check whether the framework-requested CAC mode is supported by the device
11829 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11830 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11831 entryAvailable = TRUE;
11832 break;
11833 }
11834 }
11835 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11836 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11837 // Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH behaving the same as FAST in the ISP
11838 // No HW ISP CAC + only SW CAC : advertise all 3 modes, with FAST behaving the same as OFF
11839 if (entryAvailable == FALSE) {
11840 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11841 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11842 } else {
11843 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11844 // HIGH_QUALITY is not supported, so fall back to FAST: the spec says the
11845 // underlying device implementation may be the same for both modes.
11846 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11847 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11848 // FAST is not supported either, so neither HIGH nor FAST can be set; choose OFF
11849 // to avoid the fps drop a high-quality mode would cause
11850 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11851 } else {
11852 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11853 }
11854 }
11855 }
11856 LOGD("Final cacMode is %d", cacMode);
11857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11858 rc = BAD_VALUE;
11859 }
11860 } else {
11861 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11862 }
11863 }
11864
Thierry Strudel2896d122017-02-23 19:18:03 -080011865 char af_value[PROPERTY_VALUE_MAX];
11866 property_get("persist.camera.af.infinity", af_value, "0");
11867
Jason Lee84ae9972017-02-24 13:24:24 -080011868 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011869 if (atoi(af_value) == 0) {
11870 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011871 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011872 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11873 fwk_focusMode);
11874 if (NAME_NOT_FOUND != val) {
11875 uint8_t focusMode = (uint8_t)val;
11876 LOGD("set focus mode %d", focusMode);
11877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11878 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11879 rc = BAD_VALUE;
11880 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011881 }
11882 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011883 } else {
11884 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11885 LOGE("Focus forced to infinity %d", focusMode);
11886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11887 rc = BAD_VALUE;
11888 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011889 }
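    /* Usage note (debug override, values illustrative): the infinity-focus path above is
     * driven by an adb-settable property, e.g.
     *     adb shell setprop persist.camera.af.infinity 1
     * Any non-zero value forces CAM_FOCUS_MODE_INFINITY for every request; setting it
     * back to 0 restores the framework-controlled AF mode path. */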
11890
Jason Lee84ae9972017-02-24 13:24:24 -080011891 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11892 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011893 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11895 focalDistance)) {
11896 rc = BAD_VALUE;
11897 }
11898 }
11899
11900 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11901 uint8_t fwk_antibandingMode =
11902 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11903 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11904 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11905 if (NAME_NOT_FOUND != val) {
11906 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011907 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11908 if (m60HzZone) {
11909 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11910 } else {
11911 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11912 }
11913 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11915 hal_antibandingMode)) {
11916 rc = BAD_VALUE;
11917 }
11918 }
11919 }
11920
11921 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11922 int32_t expCompensation = frame_settings.find(
11923 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11924 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11925 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11926 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11927 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011928 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11930 expCompensation)) {
11931 rc = BAD_VALUE;
11932 }
11933 }
11934
11935 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11936 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11938 rc = BAD_VALUE;
11939 }
11940 }
11941 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11942 rc = setHalFpsRange(frame_settings, hal_metadata);
11943 if (rc != NO_ERROR) {
11944 LOGE("setHalFpsRange failed");
11945 }
11946 }
11947
11948 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11949 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11951 rc = BAD_VALUE;
11952 }
11953 }
11954
11955 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11956 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11957 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11958 fwk_effectMode);
11959 if (NAME_NOT_FOUND != val) {
11960 uint8_t effectMode = (uint8_t)val;
11961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11962 rc = BAD_VALUE;
11963 }
11964 }
11965 }
11966
11967 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11968 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11970 colorCorrectMode)) {
11971 rc = BAD_VALUE;
11972 }
11973 }
11974
11975 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11976 cam_color_correct_gains_t colorCorrectGains;
11977 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11978 colorCorrectGains.gains[i] =
11979 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11980 }
11981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11982 colorCorrectGains)) {
11983 rc = BAD_VALUE;
11984 }
11985 }
11986
11987 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11988 cam_color_correct_matrix_t colorCorrectTransform;
11989 cam_rational_type_t transform_elem;
11990 size_t num = 0;
11991 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11992 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11993 transform_elem.numerator =
11994 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11995 transform_elem.denominator =
11996 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11997 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11998 num++;
11999 }
12000 }
12001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12002 colorCorrectTransform)) {
12003 rc = BAD_VALUE;
12004 }
12005 }
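    /* Layout sketch: the framework delivers the color transform as a flat array of
     * rationals in row-major order, so the loop above maps data.r[num] with
     * num = i * CC_MATRIX_COLS + j onto transform_matrix[i][j] (assuming, as for the
     * Android color correction transform, CC_MATRIX_ROWS == CC_MATRIX_COLS == 3). */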
12006
12007 cam_trigger_t aecTrigger;
12008 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12009 aecTrigger.trigger_id = -1;
12010 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12011 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12012 aecTrigger.trigger =
12013 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12014 aecTrigger.trigger_id =
12015 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12016 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12017 aecTrigger)) {
12018 rc = BAD_VALUE;
12019 }
12020 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12021 aecTrigger.trigger, aecTrigger.trigger_id);
12022 }
12023
12024 /*af_trigger must come with a trigger id*/
12025 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12026 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12027 cam_trigger_t af_trigger;
12028 af_trigger.trigger =
12029 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12030 af_trigger.trigger_id =
12031 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12033 rc = BAD_VALUE;
12034 }
12035 LOGD("AfTrigger: %d AfTriggerID: %d",
12036 af_trigger.trigger, af_trigger.trigger_id);
12037 }
12038
12039 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12040 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12042 rc = BAD_VALUE;
12043 }
12044 }
12045 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12046 cam_edge_application_t edge_application;
12047 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012048
Thierry Strudel3d639192016-09-09 11:52:26 -070012049 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12050 edge_application.sharpness = 0;
12051 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012052 edge_application.sharpness =
12053 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12054 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12055 int32_t sharpness =
12056 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12057 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12058 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12059 LOGD("Setting edge mode sharpness %d", sharpness);
12060 edge_application.sharpness = sharpness;
12061 }
12062 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012063 }
12064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12065 rc = BAD_VALUE;
12066 }
12067 }
12068
12069 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12070 int32_t respectFlashMode = 1;
12071 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12072 uint8_t fwk_aeMode =
12073 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012074 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12075 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12076 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012077 respectFlashMode = 0;
12078 LOGH("AE Mode controls flash, ignore android.flash.mode");
12079 }
12080 }
12081 if (respectFlashMode) {
12082 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12083 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12084 LOGH("flash mode after mapping %d", val);
12085 // To check: CAM_INTF_META_FLASH_MODE usage
12086 if (NAME_NOT_FOUND != val) {
12087 uint8_t flashMode = (uint8_t)val;
12088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12089 rc = BAD_VALUE;
12090 }
12091 }
12092 }
12093 }
12094
12095 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12096 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12098 rc = BAD_VALUE;
12099 }
12100 }
12101
12102 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12103 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12105 flashFiringTime)) {
12106 rc = BAD_VALUE;
12107 }
12108 }
12109
12110 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12111 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12113 hotPixelMode)) {
12114 rc = BAD_VALUE;
12115 }
12116 }
12117
12118 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12119 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12121 lensAperture)) {
12122 rc = BAD_VALUE;
12123 }
12124 }
12125
12126 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12127 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12129 filterDensity)) {
12130 rc = BAD_VALUE;
12131 }
12132 }
12133
12134 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12135 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12136 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12137 focalLength)) {
12138 rc = BAD_VALUE;
12139 }
12140 }
12141
12142 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12143 uint8_t optStabMode =
12144 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12146 optStabMode)) {
12147 rc = BAD_VALUE;
12148 }
12149 }
12150
12151 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12152 uint8_t videoStabMode =
12153 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12154 LOGD("videoStabMode from APP = %d", videoStabMode);
12155 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12156 videoStabMode)) {
12157 rc = BAD_VALUE;
12158 }
12159 }
12160
12161
12162 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12163 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12165 noiseRedMode)) {
12166 rc = BAD_VALUE;
12167 }
12168 }
12169
12170 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12171 float reprocessEffectiveExposureFactor =
12172 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12173 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12174 reprocessEffectiveExposureFactor)) {
12175 rc = BAD_VALUE;
12176 }
12177 }
12178
12179 cam_crop_region_t scalerCropRegion;
12180 bool scalerCropSet = false;
12181 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12182 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12183 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12184 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12185 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12186
12187 // Map coordinate system from active array to sensor output.
12188 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12189 scalerCropRegion.width, scalerCropRegion.height);
12190
12191 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12192 scalerCropRegion)) {
12193 rc = BAD_VALUE;
12194 }
12195 scalerCropSet = true;
12196 }
12197
12198 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12199 int64_t sensorExpTime =
12200 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12201 LOGD("setting sensorExpTime %lld", sensorExpTime);
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12203 sensorExpTime)) {
12204 rc = BAD_VALUE;
12205 }
12206 }
12207
12208 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12209 int64_t sensorFrameDuration =
12210 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012211 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12212 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12213 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12214 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12215 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12216 sensorFrameDuration)) {
12217 rc = BAD_VALUE;
12218 }
12219 }
12220
12221 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12222 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12223 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12224 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12225 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12226 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12227 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12229 sensorSensitivity)) {
12230 rc = BAD_VALUE;
12231 }
12232 }
12233
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012234#ifndef USE_HAL_3_3
12235 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12236 int32_t ispSensitivity =
12237 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12238 if (ispSensitivity <
12239 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12240 ispSensitivity =
12241 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12242 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12243 }
12244 if (ispSensitivity >
12245 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12246 ispSensitivity =
12247 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12248 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12249 }
12250 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12251 ispSensitivity)) {
12252 rc = BAD_VALUE;
12253 }
12254 }
12255#endif
12256
Thierry Strudel3d639192016-09-09 11:52:26 -070012257 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12258 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12260 rc = BAD_VALUE;
12261 }
12262 }
12263
12264 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12265 uint8_t fwk_facedetectMode =
12266 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12267
12268 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12269 fwk_facedetectMode);
12270
12271 if (NAME_NOT_FOUND != val) {
12272 uint8_t facedetectMode = (uint8_t)val;
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12274 facedetectMode)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278 }
12279
Thierry Strudel54dc9782017-02-15 12:12:10 -080012280 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012281 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012282 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12284 histogramMode)) {
12285 rc = BAD_VALUE;
12286 }
12287 }
12288
12289 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12290 uint8_t sharpnessMapMode =
12291 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12293 sharpnessMapMode)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297
12298 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12299 uint8_t tonemapMode =
12300 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12302 rc = BAD_VALUE;
12303 }
12304 }
12305 /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12306 /* All tonemap channels have the same number of points */
12307 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12308 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12309 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12310 cam_rgb_tonemap_curves tonemapCurves;
12311 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12312 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12313 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12314 tonemapCurves.tonemap_points_cnt,
12315 CAM_MAX_TONEMAP_CURVE_SIZE);
12316 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12317 }
12318
12319 /* ch0 = G*/
12320 size_t point = 0;
12321 cam_tonemap_curve_t tonemapCurveGreen;
12322 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12323 for (size_t j = 0; j < 2; j++) {
12324 tonemapCurveGreen.tonemap_points[i][j] =
12325 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12326 point++;
12327 }
12328 }
12329 tonemapCurves.curves[0] = tonemapCurveGreen;
12330
12331 /* ch 1 = B */
12332 point = 0;
12333 cam_tonemap_curve_t tonemapCurveBlue;
12334 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12335 for (size_t j = 0; j < 2; j++) {
12336 tonemapCurveBlue.tonemap_points[i][j] =
12337 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12338 point++;
12339 }
12340 }
12341 tonemapCurves.curves[1] = tonemapCurveBlue;
12342
12343 /* ch 2 = R */
12344 point = 0;
12345 cam_tonemap_curve_t tonemapCurveRed;
12346 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12347 for (size_t j = 0; j < 2; j++) {
12348 tonemapCurveRed.tonemap_points[i][j] =
12349 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12350 point++;
12351 }
12352 }
12353 tonemapCurves.curves[2] = tonemapCurveRed;
12354
12355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12356 tonemapCurves)) {
12357 rc = BAD_VALUE;
12358 }
12359 }
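    /* Worked example (illustrative values): a linear curve supplied as the interleaved
     * (Pin, Pout) floats {0.0, 0.0, 1.0, 1.0} has count == 4, giving
     * tonemap_points_cnt = 4 / 2 = 2 points per channel; counts beyond
     * CAM_MAX_TONEMAP_CURVE_SIZE are truncated by the check above. */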
12360
12361 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12362 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12364 captureIntent)) {
12365 rc = BAD_VALUE;
12366 }
12367 }
12368
12369 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12370 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12371 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12372 blackLevelLock)) {
12373 rc = BAD_VALUE;
12374 }
12375 }
12376
12377 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12378 uint8_t lensShadingMapMode =
12379 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12380 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12381 lensShadingMapMode)) {
12382 rc = BAD_VALUE;
12383 }
12384 }
12385
12386 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12387 cam_area_t roi;
12388 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012389 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012390
12391 // Map coordinate system from active array to sensor output.
12392 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12393 roi.rect.height);
12394
12395 if (scalerCropSet) {
12396 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12397 }
12398 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12399 rc = BAD_VALUE;
12400 }
12401 }
12402
12403 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12404 cam_area_t roi;
12405 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012406 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012407
12408 // Map coordinate system from active array to sensor output.
12409 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12410 roi.rect.height);
12411
12412 if (scalerCropSet) {
12413 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12414 }
12415 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12416 rc = BAD_VALUE;
12417 }
12418 }
12419
12420 // CDS for non-HFR non-video mode
12421 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12422 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12423 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12424 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12425 LOGE("Invalid CDS mode %d!", *fwk_cds);
12426 } else {
12427 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12428 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12429 rc = BAD_VALUE;
12430 }
12431 }
12432 }
12433
Thierry Strudel04e026f2016-10-10 11:27:36 -070012434 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012435 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012436 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012437 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12438 }
12439 if (m_bVideoHdrEnabled)
12440 vhdr = CAM_VIDEO_HDR_MODE_ON;
12441
Thierry Strudel54dc9782017-02-15 12:12:10 -080012442 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12443
12444 if(vhdr != curr_hdr_state)
12445 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12446
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012447 rc = setVideoHdrMode(mParameters, vhdr);
12448 if (rc != NO_ERROR) {
12449 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012450 }
12451
12452 //IR
12453 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12454 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12455 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012456 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12457 uint8_t isIRon = 0;
12458
12459 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012460 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12461 LOGE("Invalid IR mode %d!", fwk_ir);
12462 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012463 if(isIRon != curr_ir_state )
12464 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12465
Thierry Strudel04e026f2016-10-10 11:27:36 -070012466 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12467 CAM_INTF_META_IR_MODE, fwk_ir)) {
12468 rc = BAD_VALUE;
12469 }
12470 }
12471 }
12472
Thierry Strudel54dc9782017-02-15 12:12:10 -080012473 //Binning Correction Mode
12474 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12475 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12476 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12477 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12478 || (0 > fwk_binning_correction)) {
12479 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12480 } else {
12481 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12482 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12483 rc = BAD_VALUE;
12484 }
12485 }
12486 }
12487
Thierry Strudel269c81a2016-10-12 12:13:59 -070012488 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12489 float aec_speed;
12490 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12491 LOGD("AEC Speed :%f", aec_speed);
12492 if ( aec_speed < 0 ) {
12493 LOGE("Invalid AEC mode %f!", aec_speed);
12494 } else {
12495 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12496 aec_speed)) {
12497 rc = BAD_VALUE;
12498 }
12499 }
12500 }
12501
12502 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12503 float awb_speed;
12504 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12505 LOGD("AWB Speed :%f", awb_speed);
12506 if ( awb_speed < 0 ) {
12507 LOGE("Invalid AWB mode %f!", awb_speed);
12508 } else {
12509 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12510 awb_speed)) {
12511 rc = BAD_VALUE;
12512 }
12513 }
12514 }
12515
Thierry Strudel3d639192016-09-09 11:52:26 -070012516 // TNR
12517 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12518 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12519 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012520 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012521 cam_denoise_param_t tnr;
12522 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12523 tnr.process_plates =
12524 (cam_denoise_process_type_t)frame_settings.find(
12525 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12526 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012527
12528 if(b_TnrRequested != curr_tnr_state)
12529 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12530
Thierry Strudel3d639192016-09-09 11:52:26 -070012531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12532 rc = BAD_VALUE;
12533 }
12534 }
12535
Thierry Strudel54dc9782017-02-15 12:12:10 -080012536 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012537 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012538 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12540 *exposure_metering_mode)) {
12541 rc = BAD_VALUE;
12542 }
12543 }
12544
Thierry Strudel3d639192016-09-09 11:52:26 -070012545 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12546 int32_t fwk_testPatternMode =
12547 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12548 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12549 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12550
12551 if (NAME_NOT_FOUND != testPatternMode) {
12552 cam_test_pattern_data_t testPatternData;
12553 memset(&testPatternData, 0, sizeof(testPatternData));
12554 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12555 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12556 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12557 int32_t *fwk_testPatternData =
12558 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12559 testPatternData.r = fwk_testPatternData[0];
12560 testPatternData.b = fwk_testPatternData[3];
12561 switch (gCamCapability[mCameraId]->color_arrangement) {
12562 case CAM_FILTER_ARRANGEMENT_RGGB:
12563 case CAM_FILTER_ARRANGEMENT_GRBG:
12564 testPatternData.gr = fwk_testPatternData[1];
12565 testPatternData.gb = fwk_testPatternData[2];
12566 break;
12567 case CAM_FILTER_ARRANGEMENT_GBRG:
12568 case CAM_FILTER_ARRANGEMENT_BGGR:
12569 testPatternData.gr = fwk_testPatternData[2];
12570 testPatternData.gb = fwk_testPatternData[1];
12571 break;
12572 default:
12573 LOGE("color arrangement %d is not supported",
12574 gCamCapability[mCameraId]->color_arrangement);
12575 break;
12576 }
12577 }
12578 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12579 testPatternData)) {
12580 rc = BAD_VALUE;
12581 }
12582 } else {
12583 LOGE("Invalid framework sensor test pattern mode %d",
12584 fwk_testPatternMode);
12585 }
12586 }
12587
12588 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12589 size_t count = 0;
12590 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12591 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12592 gps_coords.data.d, gps_coords.count, count);
12593 if (gps_coords.count != count) {
12594 rc = BAD_VALUE;
12595 }
12596 }
12597
12598 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12599 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12600 size_t count = 0;
12601 const char *gps_methods_src = (const char *)
12602 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12603 memset(gps_methods, '\0', sizeof(gps_methods));
12604 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12605 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12606 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12607 if (GPS_PROCESSING_METHOD_SIZE != count) {
12608 rc = BAD_VALUE;
12609 }
12610 }
12611
12612 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12613 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12615 gps_timestamp)) {
12616 rc = BAD_VALUE;
12617 }
12618 }
12619
12620 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12621 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12622 cam_rotation_info_t rotation_info;
12623 if (orientation == 0) {
12624 rotation_info.rotation = ROTATE_0;
12625 } else if (orientation == 90) {
12626 rotation_info.rotation = ROTATE_90;
12627 } else if (orientation == 180) {
12628 rotation_info.rotation = ROTATE_180;
12629 } else if (orientation == 270) {
12630 rotation_info.rotation = ROTATE_270;
12631 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012632 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012633 rotation_info.streamId = snapshotStreamId;
12634 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12636 rc = BAD_VALUE;
12637 }
12638 }
12639
12640 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12641 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12643 rc = BAD_VALUE;
12644 }
12645 }
12646
12647 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12648 uint32_t thumb_quality = (uint32_t)
12649 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12651 thumb_quality)) {
12652 rc = BAD_VALUE;
12653 }
12654 }
12655
12656 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12657 cam_dimension_t dim;
12658 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12659 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12661 rc = BAD_VALUE;
12662 }
12663 }
12664
12665 // Internal metadata
12666 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12667 size_t count = 0;
12668 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12669 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12670 privatedata.data.i32, privatedata.count, count);
12671 if (privatedata.count != count) {
12672 rc = BAD_VALUE;
12673 }
12674 }
12675
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012676 // ISO/Exposure Priority
12677 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12678 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12679 cam_priority_mode_t mode =
12680 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12681 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12682 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12683 use_iso_exp_pty.previewOnly = FALSE;
12684 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12685 use_iso_exp_pty.value = *ptr;
12686
12687 if(CAM_ISO_PRIORITY == mode) {
12688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12689 use_iso_exp_pty)) {
12690 rc = BAD_VALUE;
12691 }
12692 }
12693 else {
12694 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12695 use_iso_exp_pty)) {
12696 rc = BAD_VALUE;
12697 }
12698 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012699
12700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12701 rc = BAD_VALUE;
12702 }
12703 }
12704 } else {
12705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12706 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012707 }
12708 }
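    /* Example of the contract above (illustrative values): with
     * QCAMERA3_SELECT_PRIORITY = CAM_ISO_PRIORITY and QCAMERA3_USE_ISO_EXP_PRIORITY = 800,
     * the 64-bit value 800 is forwarded as a fixed ISO via CAM_INTF_PARM_ISO and ZSL is
     * enabled; with CAM_EXP_PRIORITY the same field carries an exposure time via
     * CAM_INTF_PARM_EXPOSURE_TIME. Requests without these two vendor tags disable ZSL. */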
12709
12710 // Saturation
12711 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12712 int32_t* use_saturation =
12713 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12715 rc = BAD_VALUE;
12716 }
12717 }
12718
Thierry Strudel3d639192016-09-09 11:52:26 -070012719 // EV step
12720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12721 gCamCapability[mCameraId]->exp_compensation_step)) {
12722 rc = BAD_VALUE;
12723 }
12724
12725 // CDS info
12726 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12727 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12728 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12729
12730 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12731 CAM_INTF_META_CDS_DATA, *cdsData)) {
12732 rc = BAD_VALUE;
12733 }
12734 }
12735
Shuzhen Wang19463d72016-03-08 11:09:52 -080012736 // Hybrid AE
12737 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12738 uint8_t *hybrid_ae = (uint8_t *)
12739 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12740
12741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12742 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12743 rc = BAD_VALUE;
12744 }
12745 }
12746
Shuzhen Wang14415f52016-11-16 18:26:18 -080012747 // Histogram
12748 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12749 uint8_t histogramMode =
12750 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12752 histogramMode)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
12757 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12758 int32_t histogramBins =
12759 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12760 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12761 histogramBins)) {
12762 rc = BAD_VALUE;
12763 }
12764 }
12765
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012766 // Tracking AF
12767 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12768 uint8_t trackingAfTrigger =
12769 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12771 trackingAfTrigger)) {
12772 rc = BAD_VALUE;
12773 }
12774 }
12775
Thierry Strudel3d639192016-09-09 11:52:26 -070012776 return rc;
12777}
12778
12779/*===========================================================================
12780 * FUNCTION : captureResultCb
12781 *
12782 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12783 *
12784 * PARAMETERS :
12785 * @frame : frame information from mm-camera-interface
12786 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12787 * @userdata: userdata
12788 *
12789 * RETURN : NONE
12790 *==========================================================================*/
12791void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12792 camera3_stream_buffer_t *buffer,
12793 uint32_t frame_number, bool isInputBuffer, void *userdata)
12794{
12795 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12796 if (hw == NULL) {
12797 LOGE("Invalid hw %p", hw);
12798 return;
12799 }
12800
12801 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12802 return;
12803}
12804
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012805/*===========================================================================
12806 * FUNCTION : setBufferErrorStatus
12807 *
12808 * DESCRIPTION: Callback handler for channels to report any buffer errors
12809 *
12810 * PARAMETERS :
12811 * @ch : Channel on which buffer error is reported from
12812 * @frame_number : frame number on which buffer error is reported on
12813 * @buffer_status : buffer error status
12814 * @userdata: userdata
12815 *
12816 * RETURN : NONE
12817 *==========================================================================*/
12818void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12819 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12820{
12821 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12822 if (hw == NULL) {
12823 LOGE("Invalid hw %p", hw);
12824 return;
12825 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012826
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012827 hw->setBufferErrorStatus(ch, frame_number, err);
12828 return;
12829}
12830
12831void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12832 uint32_t frameNumber, camera3_buffer_status_t err)
12833{
12834 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12835 pthread_mutex_lock(&mMutex);
12836
12837 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12838 if (req.frame_number != frameNumber)
12839 continue;
12840 for (auto& k : req.mPendingBufferList) {
12841 if(k.stream->priv == ch) {
12842 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12843 }
12844 }
12845 }
12846
12847 pthread_mutex_unlock(&mMutex);
12848 return;
12849}
Thierry Strudel3d639192016-09-09 11:52:26 -070012850/*===========================================================================
12851 * FUNCTION : initialize
12852 *
12853 * DESCRIPTION: Pass framework callback pointers to HAL
12854 *
12855 * PARAMETERS :
12856 *
12857 *
12858 * RETURN : Success : 0
12859 * Failure: -ENODEV
12860 *==========================================================================*/
12861
12862int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12863 const camera3_callback_ops_t *callback_ops)
12864{
12865 LOGD("E");
12866 QCamera3HardwareInterface *hw =
12867 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12868 if (!hw) {
12869 LOGE("NULL camera device");
12870 return -ENODEV;
12871 }
12872
12873 int rc = hw->initialize(callback_ops);
12874 LOGD("X");
12875 return rc;
12876}
12877
12878/*===========================================================================
12879 * FUNCTION : configure_streams
12880 *
12881 * DESCRIPTION:
12882 *
12883 * PARAMETERS :
12884 *
12885 *
12886 * RETURN : Success: 0
12887 * Failure: -EINVAL (if stream configuration is invalid)
12888 * -ENODEV (fatal error)
12889 *==========================================================================*/
12890
12891int QCamera3HardwareInterface::configure_streams(
12892 const struct camera3_device *device,
12893 camera3_stream_configuration_t *stream_list)
12894{
12895 LOGD("E");
12896 QCamera3HardwareInterface *hw =
12897 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12898 if (!hw) {
12899 LOGE("NULL camera device");
12900 return -ENODEV;
12901 }
12902 int rc = hw->configureStreams(stream_list);
12903 LOGD("X");
12904 return rc;
12905}
12906
12907/*===========================================================================
12908 * FUNCTION : construct_default_request_settings
12909 *
12910 * DESCRIPTION: Configure a settings buffer to meet the required use case
12911 *
12912 * PARAMETERS :
12913 *
12914 *
12915 * RETURN : Success: Return valid metadata
12916 * Failure: Return NULL
12917 *==========================================================================*/
12918const camera_metadata_t* QCamera3HardwareInterface::
12919 construct_default_request_settings(const struct camera3_device *device,
12920 int type)
12921{
12922
12923 LOGD("E");
12924 camera_metadata_t* fwk_metadata = NULL;
12925 QCamera3HardwareInterface *hw =
12926 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12927 if (!hw) {
12928 LOGE("NULL camera device");
12929 return NULL;
12930 }
12931
12932 fwk_metadata = hw->translateCapabilityToMetadata(type);
12933
12934 LOGD("X");
12935 return fwk_metadata;
12936}
12937
12938/*===========================================================================
12939 * FUNCTION : process_capture_request
12940 *
12941 * DESCRIPTION:
12942 *
12943 * PARAMETERS :
12944 *
12945 *
12946 * RETURN :
12947 *==========================================================================*/
12948int QCamera3HardwareInterface::process_capture_request(
12949 const struct camera3_device *device,
12950 camera3_capture_request_t *request)
12951{
12952 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012953 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012954 QCamera3HardwareInterface *hw =
12955 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12956 if (!hw) {
12957 LOGE("NULL camera device");
12958 return -EINVAL;
12959 }
12960
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012961 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012962 LOGD("X");
12963 return rc;
12964}
12965
12966/*===========================================================================
12967 * FUNCTION : dump
12968 *
12969 * DESCRIPTION:
12970 *
12971 * PARAMETERS :
12972 *
12973 *
12974 * RETURN :
12975 *==========================================================================*/
12976
12977void QCamera3HardwareInterface::dump(
12978 const struct camera3_device *device, int fd)
12979{
12980 /* Log level property is read when "adb shell dumpsys media.camera" is
12981 called so that the log level can be controlled without restarting
12982 the media server */
12983 getLogLevel();
12984
12985 LOGD("E");
12986 QCamera3HardwareInterface *hw =
12987 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12988 if (!hw) {
12989 LOGE("NULL camera device");
12990 return;
12991 }
12992
12993 hw->dump(fd);
12994 LOGD("X");
12995 return;
12996}
12997
12998/*===========================================================================
12999 * FUNCTION : flush
13000 *
13001 * DESCRIPTION:
13002 *
13003 * PARAMETERS :
13004 *
13005 *
13006 * RETURN :
13007 *==========================================================================*/
13008
13009int QCamera3HardwareInterface::flush(
13010 const struct camera3_device *device)
13011{
13012 int rc;
13013 LOGD("E");
13014 QCamera3HardwareInterface *hw =
13015 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13016 if (!hw) {
13017 LOGE("NULL camera device");
13018 return -EINVAL;
13019 }
13020
13021 pthread_mutex_lock(&hw->mMutex);
13022 // Validate current state
13023 switch (hw->mState) {
13024 case STARTED:
13025 /* valid state */
13026 break;
13027
13028 case ERROR:
13029 pthread_mutex_unlock(&hw->mMutex);
13030 hw->handleCameraDeviceError();
13031 return -ENODEV;
13032
13033 default:
13034 LOGI("Flush returned during state %d", hw->mState);
13035 pthread_mutex_unlock(&hw->mMutex);
13036 return 0;
13037 }
13038 pthread_mutex_unlock(&hw->mMutex);
13039
13040 rc = hw->flush(true /* restart channels */ );
13041 LOGD("X");
13042 return rc;
13043}
13044
13045/*===========================================================================
13046 * FUNCTION : close_camera_device
13047 *
13048 * DESCRIPTION:
13049 *
13050 * PARAMETERS :
13051 *
13052 *
13053 * RETURN :
13054 *==========================================================================*/
13055int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13056{
13057 int ret = NO_ERROR;
13058 QCamera3HardwareInterface *hw =
13059 reinterpret_cast<QCamera3HardwareInterface *>(
13060 reinterpret_cast<camera3_device_t *>(device)->priv);
13061 if (!hw) {
13062 LOGE("NULL camera device");
13063 return BAD_VALUE;
13064 }
13065
13066 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13067 delete hw;
13068 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013069 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013070 return ret;
13071}
13072
13073/*===========================================================================
13074 * FUNCTION : getWaveletDenoiseProcessPlate
13075 *
13076 * DESCRIPTION: query wavelet denoise process plate
13077 *
13078 * PARAMETERS : None
13079 *
13080 * RETURN : WNR process plate value
13081 *==========================================================================*/
13082cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13083{
13084 char prop[PROPERTY_VALUE_MAX];
13085 memset(prop, 0, sizeof(prop));
13086 property_get("persist.denoise.process.plates", prop, "0");
13087 int processPlate = atoi(prop);
13088 switch(processPlate) {
13089 case 0:
13090 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13091 case 1:
13092 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13093 case 2:
13094 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13095 case 3:
13096 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13097 default:
13098 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13099 }
13100}
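/* Usage note: the plate selection above is controlled by an adb-settable property, e.g.
 *     adb shell setprop persist.denoise.process.plates 2
 * selects CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; unrecognized values fall back to the same
 * streamlined YCbCr plate via the default case. */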
13101
13102
13103/*===========================================================================
13104 * FUNCTION : getTemporalDenoiseProcessPlate
13105 *
13106 * DESCRIPTION: query temporal denoise process plate
13107 *
13108 * PARAMETERS : None
13109 *
13110 * RETURN : TNR process plate value
13111 *==========================================================================*/
13112cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13113{
13114 char prop[PROPERTY_VALUE_MAX];
13115 memset(prop, 0, sizeof(prop));
13116 property_get("persist.tnr.process.plates", prop, "0");
13117 int processPlate = atoi(prop);
13118 switch(processPlate) {
13119 case 0:
13120 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13121 case 1:
13122 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13123 case 2:
13124 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13125 case 3:
13126 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13127 default:
13128 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13129 }
13130}
13131
13132
13133/*===========================================================================
13134 * FUNCTION : extractSceneMode
13135 *
13136 * DESCRIPTION: Extract scene mode from framework-set metadata
13137 *
13138 * PARAMETERS :
13139 * @frame_settings: CameraMetadata reference
13140 * @metaMode: ANDROID_CONTROL_MODE value
13141 * @hal_metadata: hal metadata structure
13142 *
13143 * RETURN : NO_ERROR on success, error code otherwise
13144 *==========================================================================*/
13145int32_t QCamera3HardwareInterface::extractSceneMode(
13146 const CameraMetadata &frame_settings, uint8_t metaMode,
13147 metadata_buffer_t *hal_metadata)
13148{
13149 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013150 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13151
13152 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13153 LOGD("Ignoring control mode OFF_KEEP_STATE");
13154 return NO_ERROR;
13155 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013156
13157 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13158 camera_metadata_ro_entry entry =
13159 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13160 if (0 == entry.count)
13161 return rc;
13162
13163 uint8_t fwk_sceneMode = entry.data.u8[0];
13164
13165 int val = lookupHalName(SCENE_MODES_MAP,
13166 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13167 fwk_sceneMode);
13168 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013169 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013170 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013171 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013172 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013173
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013174 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13175 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13176 }
13177
13178 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13179 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013180 cam_hdr_param_t hdr_params;
13181 hdr_params.hdr_enable = 1;
13182 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13183 hdr_params.hdr_need_1x = false;
13184 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13185 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13186 rc = BAD_VALUE;
13187 }
13188 }
13189
13190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13191 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13192 rc = BAD_VALUE;
13193 }
13194 }
13195
13196 if (mForceHdrSnapshot) {
13197 cam_hdr_param_t hdr_params;
13198 hdr_params.hdr_enable = 1;
13199 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13200 hdr_params.hdr_need_1x = false;
13201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13202 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13203 rc = BAD_VALUE;
13204 }
13205 }
13206
13207 return rc;
13208}
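/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * rough flow for a framework request carrying a scene mode, assuming
 * SCENE_MODES_MAP holds the usual framework-to-HAL entries:
 *
 *   // frame_settings: ANDROID_CONTROL_SCENE_MODE = ANDROID_CONTROL_SCENE_MODE_ACTION
 *   extractSceneMode(frame_settings, ANDROID_CONTROL_MODE_USE_SCENE_MODE, mParameters);
 *   // -> lookupHalName() yields CAM_SCENE_MODE_ACTION, which is written to
 *   //    CAM_INTF_PARM_BESTSHOT_MODE. HDR bracketing parameters are added
 *   //    only for the HDR scene mode or when mForceHdrSnapshot is set.
 *==========================================================================*/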
13209
13210/*===========================================================================
13211 * FUNCTION : setVideoHdrMode
13212 *
13213 * DESCRIPTION: Set video HDR mode from framework-set metadata
13214 *
13215 * PARAMETERS :
13216 * @hal_metadata: hal metadata structure
13217 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13218 *
13219 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on failure)
13220 *==========================================================================*/
13221int32_t QCamera3HardwareInterface::setVideoHdrMode(
13222 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13223{
13224 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13225 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13226 }
13227
13228 LOGE("Invalid Video HDR mode %d!", vhdr);
13229 return BAD_VALUE;
13230}
13231
13232/*===========================================================================
13233 * FUNCTION : setSensorHDR
13234 *
13235 * DESCRIPTION: Enable/disable sensor HDR.
13236 *
13237 * PARAMETERS :
13238 * @hal_metadata: hal metadata structure
13239 * @enable: boolean whether to enable/disable sensor HDR
13240 *
13241 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on failure)
13242 *==========================================================================*/
13243int32_t QCamera3HardwareInterface::setSensorHDR(
13244 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13245{
13246 int32_t rc = NO_ERROR;
13247 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13248
13249 if (enable) {
13250 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13251 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13252 #ifdef _LE_CAMERA_
13253 // Default to staggered HDR for IoT
13254 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13255 #else
13256 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13257 #endif
13258 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13259 }
13260
13261 bool isSupported = false;
13262 switch (sensor_hdr) {
13263 case CAM_SENSOR_HDR_IN_SENSOR:
13264 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13265 CAM_QCOM_FEATURE_SENSOR_HDR) {
13266 isSupported = true;
13267 LOGD("Setting HDR mode In Sensor");
13268 }
13269 break;
13270 case CAM_SENSOR_HDR_ZIGZAG:
13271 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13272 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13273 isSupported = true;
13274 LOGD("Setting HDR mode Zigzag");
13275 }
13276 break;
13277 case CAM_SENSOR_HDR_STAGGERED:
13278 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13279 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13280 isSupported = true;
13281 LOGD("Setting HDR mode Staggered");
13282 }
13283 break;
13284 case CAM_SENSOR_HDR_OFF:
13285 isSupported = true;
13286 LOGD("Turning off sensor HDR");
13287 break;
13288 default:
13289 LOGE("HDR mode %d not supported", sensor_hdr);
13290 rc = BAD_VALUE;
13291 break;
13292 }
13293
13294 if(isSupported) {
13295 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13296 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13297 rc = BAD_VALUE;
13298 } else {
13299 if(!isVideoHdrEnable)
13300 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
13301 }
13302 }
13303 return rc;
13304}
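/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * with "persist.camera.sensor.hdr" set to 3 (the staggered mode used as the
 * IoT default above), CAM_INTF_PARM_SENSOR_HDR is written only when the
 * capability mask advertises CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR; an
 * unknown property value returns BAD_VALUE and leaves m_bSensorHDREnabled
 * untouched. For video HDR (isVideoHdrEnable == true) the
 * m_bSensorHDREnabled flag is intentionally not updated.
 *==========================================================================*/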
13305
13306/*===========================================================================
13307 * FUNCTION : needRotationReprocess
13308 *
13309 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
13310 *
13311 * PARAMETERS : none
13312 *
13313 * RETURN : true: needed
13314 * false: no need
13315 *==========================================================================*/
13316bool QCamera3HardwareInterface::needRotationReprocess()
13317{
13318 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13319 // pp has the capability to process rotation, so use reprocess for rotation
13320 LOGH("need do reprocess for rotation");
13321 return true;
13322 }
13323
13324 return false;
13325}
13326
13327/*===========================================================================
13328 * FUNCTION : needReprocess
13329 *
13330 * DESCRIPTION: check whether reprocess is needed
13331 *
13332 * PARAMETERS : none
13333 *
13334 * RETURN : true: needed
13335 * false: no need
13336 *==========================================================================*/
13337bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13338{
13339 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13340 // TODO: add for ZSL HDR later
13341 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13342 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13343 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13344 return true;
13345 } else {
13346 LOGH("already post processed frame");
13347 return false;
13348 }
13349 }
13350 return needRotationReprocess();
13351}
13352
13353/*===========================================================================
13354 * FUNCTION : needJpegExifRotation
13355 *
13356 * DESCRIPTION: check whether JPEG EXIF rotation is needed
13357 *
13358 * PARAMETERS : none
13359 *
13360 * RETURN : true: needed
13361 * false: no need
13362 *==========================================================================*/
13363bool QCamera3HardwareInterface::needJpegExifRotation()
13364{
13365 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
13366 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13367 LOGD("Need use Jpeg EXIF Rotation");
13368 return true;
13369 }
13370 return false;
13371}
13372
13373/*===========================================================================
13374 * FUNCTION : addOfflineReprocChannel
13375 *
13376 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13377 * coming from input channel
13378 *
13379 * PARAMETERS :
13380 * @config : reprocess configuration
13381 * @inputChHandle : pointer to the input (source) channel
13382 *
13383 *
13384 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13385 *==========================================================================*/
13386QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13387 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13388{
13389 int32_t rc = NO_ERROR;
13390 QCamera3ReprocessChannel *pChannel = NULL;
13391
13392 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
13393 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13394 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
13395 if (NULL == pChannel) {
13396 LOGE("no mem for reprocess channel");
13397 return NULL;
13398 }
13399
13400 rc = pChannel->initialize(IS_TYPE_NONE);
13401 if (rc != NO_ERROR) {
13402 LOGE("init reprocess channel failed, ret = %d", rc);
13403 delete pChannel;
13404 return NULL;
13405 }
13406
13407 // pp feature config
13408 cam_pp_feature_config_t pp_config;
13409 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13410
13411 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13412 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13413 & CAM_QCOM_FEATURE_DSDN) {
13414 // Use CPP CDS in case h/w supports it.
13415 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13416 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13417 }
13418 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13419 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13420 }
13421
13422 if (config.hdr_param.hdr_enable) {
13423 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13424 pp_config.hdr_param = config.hdr_param;
13425 }
13426
13427 if (mForceHdrSnapshot) {
13428 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13429 pp_config.hdr_param.hdr_enable = 1;
13430 pp_config.hdr_param.hdr_need_1x = 0;
13431 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13432 }
13433
13434 rc = pChannel->addReprocStreamsFromSource(pp_config,
13435 config,
13436 IS_TYPE_NONE,
13437 mMetadataChannel);
13438
13439 if (rc != NO_ERROR) {
13440 delete pChannel;
13441 return NULL;
13442 }
13443 return pChannel;
13444}
13445
13446/*===========================================================================
13447 * FUNCTION : getMobicatMask
13448 *
13449 * DESCRIPTION: returns mobicat mask
13450 *
13451 * PARAMETERS : none
13452 *
13453 * RETURN : mobicat mask
13454 *
13455 *==========================================================================*/
13456uint8_t QCamera3HardwareInterface::getMobicatMask()
13457{
13458 return m_MobicatMask;
13459}
13460
13461/*===========================================================================
13462 * FUNCTION : setMobicat
13463 *
13464 * DESCRIPTION: set Mobicat on/off.
13465 *
13466 * PARAMETERS :
13467 * @params : none
13468 *
13469 * RETURN : int32_t type of status
13470 * NO_ERROR -- success
13471 * non-zero failure code
13472 *==========================================================================*/
13473int32_t QCamera3HardwareInterface::setMobicat()
13474{
13475 char value [PROPERTY_VALUE_MAX];
13476 property_get("persist.camera.mobicat", value, "0");
13477 int32_t ret = NO_ERROR;
13478 uint8_t enableMobi = (uint8_t)atoi(value);
13479
13480 if (enableMobi) {
13481 tune_cmd_t tune_cmd;
13482 tune_cmd.type = SET_RELOAD_CHROMATIX;
13483 tune_cmd.module = MODULE_ALL;
13484 tune_cmd.value = TRUE;
13485 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13486 CAM_INTF_PARM_SET_VFE_COMMAND,
13487 tune_cmd);
13488
13489 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13490 CAM_INTF_PARM_SET_PP_COMMAND,
13491 tune_cmd);
13492 }
13493 m_MobicatMask = enableMobi;
13494
13495 return ret;
13496}
13497
13498/*===========================================================================
13499* FUNCTION : getLogLevel
13500*
13501* DESCRIPTION: Reads the log level property into a variable
13502*
13503* PARAMETERS :
13504* None
13505*
13506* RETURN :
13507* None
13508*==========================================================================*/
13509void QCamera3HardwareInterface::getLogLevel()
13510{
13511 char prop[PROPERTY_VALUE_MAX];
13512 uint32_t globalLogLevel = 0;
13513
13514 property_get("persist.camera.hal.debug", prop, "0");
13515 int val = atoi(prop);
13516 if (0 <= val) {
13517 gCamHal3LogLevel = (uint32_t)val;
13518 }
13519
13520 property_get("persist.camera.kpi.debug", prop, "0");
13521 gKpiDebugLevel = atoi(prop);
13522
13523 property_get("persist.camera.global.debug", prop, "0");
13524 val = atoi(prop);
13525 if (0 <= val) {
13526 globalLogLevel = (uint32_t)val;
13527 }
13528
13529 /* Highest log level among hal.logs and global.logs is selected */
13530 if (gCamHal3LogLevel < globalLogLevel)
13531 gCamHal3LogLevel = globalLogLevel;
13532
13533 return;
13534}
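/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * the effective HAL log level is the maximum of the two properties read
 * above, e.g.
 *   persist.camera.hal.debug    = "2"
 *   persist.camera.global.debug = "4"
 *   => gCamHal3LogLevel == 4
 * Negative property values are ignored and leave the previous level intact.
 *==========================================================================*/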
13535
13536/*===========================================================================
13537 * FUNCTION : validateStreamRotations
13538 *
13539 * DESCRIPTION: Check if the rotations requested are supported
13540 *
13541 * PARAMETERS :
13542 * @stream_list : streams to be configured
13543 *
13544 * RETURN : NO_ERROR on success
13545 * -EINVAL on failure
13546 *
13547 *==========================================================================*/
13548int QCamera3HardwareInterface::validateStreamRotations(
13549 camera3_stream_configuration_t *streamList)
13550{
13551 int rc = NO_ERROR;
13552
13553 /*
13554 * Loop through all streams requested in configuration
13555 * Check if unsupported rotations have been requested on any of them
13556 */
13557 for (size_t j = 0; j < streamList->num_streams; j++){
13558 camera3_stream_t *newStream = streamList->streams[j];
13559
13560 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13561 bool isImplDef = (newStream->format ==
13562 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13563 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13564 isImplDef);
13565
13566 if (isRotated && (!isImplDef || isZsl)) {
13567 LOGE("Error: Unsupported rotation of %d requested for stream "
13568 "type:%d and stream format:%d",
13569 newStream->rotation, newStream->stream_type,
13570 newStream->format);
13571 rc = -EINVAL;
13572 break;
13573 }
13574 }
13575
13576 return rc;
13577}
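/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * CAMERA3_STREAM_ROTATION_90 is accepted only on an IMPLEMENTATION_DEFINED
 * output stream; the same rotation on a BLOB (JPEG) stream, or on a
 * BIDIRECTIONAL implementation-defined (ZSL) stream, rejects the whole
 * configuration with -EINVAL. Streams with ROTATION_0 always pass this check.
 *==========================================================================*/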
13578
13579/*===========================================================================
13580* FUNCTION : getFlashInfo
13581*
13582* DESCRIPTION: Retrieve information about whether the device has a flash.
13583*
13584* PARAMETERS :
13585* @cameraId : Camera id to query
13586* @hasFlash : Boolean indicating whether there is a flash device
13587* associated with given camera
13588* @flashNode : If a flash device exists, this will be its device node.
13589*
13590* RETURN :
13591* None
13592*==========================================================================*/
13593void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13594 bool& hasFlash,
13595 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13596{
13597 cam_capability_t* camCapability = gCamCapability[cameraId];
13598 if (NULL == camCapability) {
13599 hasFlash = false;
13600 flashNode[0] = '\0';
13601 } else {
13602 hasFlash = camCapability->flash_available;
13603 strlcpy(flashNode,
13604 (char*)camCapability->flash_dev_name,
13605 QCAMERA_MAX_FILEPATH_LENGTH);
13606 }
13607}
13608
13609/*===========================================================================
13610* FUNCTION : getEepromVersionInfo
13611*
13612* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13613*
13614* PARAMETERS : None
13615*
13616* RETURN : string describing EEPROM version
13617* "\0" if no such info available
13618*==========================================================================*/
13619const char *QCamera3HardwareInterface::getEepromVersionInfo()
13620{
13621 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13622}
13623
13624/*===========================================================================
13625* FUNCTION : getLdafCalib
13626*
13627* DESCRIPTION: Retrieve Laser AF calibration data
13628*
13629* PARAMETERS : None
13630*
13631* RETURN : Two uint32_t describing laser AF calibration data
13632* NULL if none is available.
13633*==========================================================================*/
13634const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13635{
13636 if (mLdafCalibExist) {
13637 return &mLdafCalib[0];
13638 } else {
13639 return NULL;
13640 }
13641}
13642
13643/*===========================================================================
13644 * FUNCTION : dynamicUpdateMetaStreamInfo
13645 *
13646 * DESCRIPTION: This function:
13647 * (1) stops all the channels
13648 * (2) returns error on pending requests and buffers
13649 * (3) sends metastream_info in setparams
13650 * (4) starts all channels
13651 * This is useful when sensor has to be restarted to apply any
13652 * settings such as frame rate from a different sensor mode
13653 *
13654 * PARAMETERS : None
13655 *
13656 * RETURN : NO_ERROR on success
13657 * Error codes on failure
13658 *
13659 *==========================================================================*/
13660int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13661{
13662 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
13663 int rc = NO_ERROR;
13664
13665 LOGD("E");
13666
13667 rc = stopAllChannels();
13668 if (rc < 0) {
13669 LOGE("stopAllChannels failed");
13670 return rc;
13671 }
13672
13673 rc = notifyErrorForPendingRequests();
13674 if (rc < 0) {
13675 LOGE("notifyErrorForPendingRequests failed");
13676 return rc;
13677 }
13678
13679 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13680 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13681 "Format:%d",
13682 mStreamConfigInfo.type[i],
13683 mStreamConfigInfo.stream_sizes[i].width,
13684 mStreamConfigInfo.stream_sizes[i].height,
13685 mStreamConfigInfo.postprocess_mask[i],
13686 mStreamConfigInfo.format[i]);
13687 }
13688
13689 /* Send meta stream info once again so that ISP can start */
13690 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13691 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13692 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13693 mParameters);
13694 if (rc < 0) {
13695 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13696 }
13697
13698 rc = startAllChannels();
13699 if (rc < 0) {
13700 LOGE("startAllChannels failed");
13701 return rc;
13702 }
13703
13704 LOGD("X");
13705 return rc;
13706}
13707
13708/*===========================================================================
13709 * FUNCTION : stopAllChannels
13710 *
13711 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13712 *
13713 * PARAMETERS : None
13714 *
13715 * RETURN : NO_ERROR on success
13716 * Error codes on failure
13717 *
13718 *==========================================================================*/
13719int32_t QCamera3HardwareInterface::stopAllChannels()
13720{
13721 int32_t rc = NO_ERROR;
13722
13723 LOGD("Stopping all channels");
13724 // Stop the Streams/Channels
13725 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13726 it != mStreamInfo.end(); it++) {
13727 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13728 if (channel) {
13729 channel->stop();
13730 }
13731 (*it)->status = INVALID;
13732 }
13733
13734 if (mSupportChannel) {
13735 mSupportChannel->stop();
13736 }
13737 if (mAnalysisChannel) {
13738 mAnalysisChannel->stop();
13739 }
13740 if (mRawDumpChannel) {
13741 mRawDumpChannel->stop();
13742 }
13743 if (mHdrPlusRawSrcChannel) {
13744 mHdrPlusRawSrcChannel->stop();
13745 }
13746 if (mMetadataChannel) {
13747 /* If content of mStreamInfo is not 0, there is metadata stream */
13748 mMetadataChannel->stop();
13749 }
13750
13751 LOGD("All channels stopped");
13752 return rc;
13753}
13754
13755/*===========================================================================
13756 * FUNCTION : startAllChannels
13757 *
13758 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13759 *
13760 * PARAMETERS : None
13761 *
13762 * RETURN : NO_ERROR on success
13763 * Error codes on failure
13764 *
13765 *==========================================================================*/
13766int32_t QCamera3HardwareInterface::startAllChannels()
13767{
13768 int32_t rc = NO_ERROR;
13769
13770 LOGD("Start all channels ");
13771 // Start the Streams/Channels
13772 if (mMetadataChannel) {
13773 /* If content of mStreamInfo is not 0, there is metadata stream */
13774 rc = mMetadataChannel->start();
13775 if (rc < 0) {
13776 LOGE("META channel start failed");
13777 return rc;
13778 }
13779 }
13780 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13781 it != mStreamInfo.end(); it++) {
13782 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13783 if (channel) {
13784 rc = channel->start();
13785 if (rc < 0) {
13786 LOGE("channel start failed");
13787 return rc;
13788 }
13789 }
13790 }
13791 if (mAnalysisChannel) {
13792 mAnalysisChannel->start();
13793 }
13794 if (mSupportChannel) {
13795 rc = mSupportChannel->start();
13796 if (rc < 0) {
13797 LOGE("Support channel start failed");
13798 return rc;
13799 }
13800 }
13801 if (mRawDumpChannel) {
13802 rc = mRawDumpChannel->start();
13803 if (rc < 0) {
13804 LOGE("RAW dump channel start failed");
13805 return rc;
13806 }
13807 }
13808 if (mHdrPlusRawSrcChannel) {
13809 rc = mHdrPlusRawSrcChannel->start();
13810 if (rc < 0) {
13811 LOGE("HDR+ RAW channel start failed");
13812 return rc;
13813 }
13814 }
13815
13816 LOGD("All channels started");
13817 return rc;
13818}
13819
13820/*===========================================================================
13821 * FUNCTION : notifyErrorForPendingRequests
13822 *
13823 * DESCRIPTION: This function sends error for all the pending requests/buffers
13824 *
13825 * PARAMETERS : None
13826 *
13827 * RETURN : Error codes
13828 * NO_ERROR on success
13829 *
13830 *==========================================================================*/
13831int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13832{
13833 int32_t rc = NO_ERROR;
13834 unsigned int frameNum = 0;
13835 camera3_capture_result_t result;
13836 camera3_stream_buffer_t *pStream_Buf = NULL;
13837
13838 memset(&result, 0, sizeof(camera3_capture_result_t));
13839
13840 if (mPendingRequestsList.size() > 0) {
13841 pendingRequestIterator i = mPendingRequestsList.begin();
13842 frameNum = i->frame_number;
13843 } else {
13844 /* There might still be pending buffers even though there are
13845 no pending requests. Setting the frameNum to MAX so that
13846 all the buffers with smaller frame numbers are returned */
13847 frameNum = UINT_MAX;
13848 }
13849
13850 LOGH("Oldest frame num on mPendingRequestsList = %u",
13851 frameNum);
13852
13853 notifyErrorFoPendingDepthData(mDepthChannel);
13854
13855 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13856 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13857
13858 if (req->frame_number < frameNum) {
13859 // Send Error notify to frameworks for each buffer for which
13860 // metadata buffer is already sent
13861 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13862 req->frame_number, req->mPendingBufferList.size());
13863
13864 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13865 if (NULL == pStream_Buf) {
13866 LOGE("No memory for pending buffers array");
13867 return NO_MEMORY;
13868 }
13869 memset(pStream_Buf, 0,
13870 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13871 result.result = NULL;
13872 result.frame_number = req->frame_number;
13873 result.num_output_buffers = req->mPendingBufferList.size();
13874 result.output_buffers = pStream_Buf;
13875
13876 size_t index = 0;
13877 for (auto info = req->mPendingBufferList.begin();
13878 info != req->mPendingBufferList.end(); ) {
13879
13880 camera3_notify_msg_t notify_msg;
13881 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13882 notify_msg.type = CAMERA3_MSG_ERROR;
13883 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13884 notify_msg.message.error.error_stream = info->stream;
13885 notify_msg.message.error.frame_number = req->frame_number;
13886 pStream_Buf[index].acquire_fence = -1;
13887 pStream_Buf[index].release_fence = -1;
13888 pStream_Buf[index].buffer = info->buffer;
13889 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13890 pStream_Buf[index].stream = info->stream;
13891 orchestrateNotify(&notify_msg);
13892 index++;
13893 // Remove buffer from list
13894 info = req->mPendingBufferList.erase(info);
13895 }
13896
13897 // Remove this request from Map
13898 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13899 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13900 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13901
13902 orchestrateResult(&result);
13903
13904 delete [] pStream_Buf;
13905 } else {
13906
13907 // Go through the pending requests info and send error request to framework
13908 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13909
13910 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13911
13912 // Send error notify to frameworks
13913 camera3_notify_msg_t notify_msg;
13914 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13915 notify_msg.type = CAMERA3_MSG_ERROR;
13916 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13917 notify_msg.message.error.error_stream = NULL;
13918 notify_msg.message.error.frame_number = req->frame_number;
13919 orchestrateNotify(&notify_msg);
13920
13921 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13922 if (NULL == pStream_Buf) {
13923 LOGE("No memory for pending buffers array");
13924 return NO_MEMORY;
13925 }
13926 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13927
13928 result.result = NULL;
13929 result.frame_number = req->frame_number;
13930 result.input_buffer = i->input_buffer;
13931 result.num_output_buffers = req->mPendingBufferList.size();
13932 result.output_buffers = pStream_Buf;
13933
13934 size_t index = 0;
13935 for (auto info = req->mPendingBufferList.begin();
13936 info != req->mPendingBufferList.end(); ) {
13937 pStream_Buf[index].acquire_fence = -1;
13938 pStream_Buf[index].release_fence = -1;
13939 pStream_Buf[index].buffer = info->buffer;
13940 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13941 pStream_Buf[index].stream = info->stream;
13942 index++;
13943 // Remove buffer from list
13944 info = req->mPendingBufferList.erase(info);
13945 }
13946
13947 // Remove this request from Map
13948 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13949 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13950 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13951
13952 orchestrateResult(&result);
13953 delete [] pStream_Buf;
13954 i = erasePendingRequest(i);
13955 }
13956 }
13957
13958 /* Reset pending frame Drop list and requests list */
13959 mPendingFrameDropList.clear();
13960
13961 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13962 req.mPendingBufferList.clear();
13963 }
13964 mPendingBuffersMap.mPendingBuffersInRequest.clear();
13965 LOGH("Cleared all the pending buffers ");
13966
13967 return rc;
13968}
13969
13970bool QCamera3HardwareInterface::isOnEncoder(
13971 const cam_dimension_t max_viewfinder_size,
13972 uint32_t width, uint32_t height)
13973{
13974 return ((width > (uint32_t)max_viewfinder_size.width) ||
13975 (height > (uint32_t)max_viewfinder_size.height) ||
13976 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13977 (height > (uint32_t)VIDEO_4K_HEIGHT));
13978}
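/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * with max_viewfinder_size = 1920x1080:
 *   isOnEncoder(max_viewfinder_size, 1280,  720) -> false
 *   isOnEncoder(max_viewfinder_size, 2560, 1440) -> true  (exceeds viewfinder bound)
 *   isOnEncoder(max_viewfinder_size, 4096, 2160) -> true  (exceeds VIDEO_4K_WIDTH)
 *==========================================================================*/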
13979
13980/*===========================================================================
13981 * FUNCTION : setBundleInfo
13982 *
13983 * DESCRIPTION: Set bundle info for all streams that are bundled.
13984 *
13985 * PARAMETERS : None
13986 *
13987 * RETURN : NO_ERROR on success
13988 * Error codes on failure
13989 *==========================================================================*/
13990int32_t QCamera3HardwareInterface::setBundleInfo()
13991{
13992 int32_t rc = NO_ERROR;
13993
13994 if (mChannelHandle) {
13995 cam_bundle_config_t bundleInfo;
13996 memset(&bundleInfo, 0, sizeof(bundleInfo));
13997 rc = mCameraHandle->ops->get_bundle_info(
13998 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13999 if (rc != NO_ERROR) {
14000 LOGE("get_bundle_info failed");
14001 return rc;
14002 }
14003 if (mAnalysisChannel) {
14004 mAnalysisChannel->setBundleInfo(bundleInfo);
14005 }
14006 if (mSupportChannel) {
14007 mSupportChannel->setBundleInfo(bundleInfo);
14008 }
14009 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14010 it != mStreamInfo.end(); it++) {
14011 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14012 channel->setBundleInfo(bundleInfo);
14013 }
14014 if (mRawDumpChannel) {
14015 mRawDumpChannel->setBundleInfo(bundleInfo);
14016 }
14017 if (mHdrPlusRawSrcChannel) {
14018 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14019 }
14020 }
14021
14022 return rc;
14023}
14024
14025/*===========================================================================
14026 * FUNCTION : setInstantAEC
14027 *
14028 * DESCRIPTION: Set Instant AEC related params.
14029 *
14030 * PARAMETERS :
14031 * @meta: CameraMetadata reference
14032 *
14033 * RETURN : NO_ERROR on success
14034 * Error codes on failure
14035 *==========================================================================*/
14036int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14037{
14038 int32_t rc = NO_ERROR;
14039 uint8_t val = 0;
14040 char prop[PROPERTY_VALUE_MAX];
14041
14042 // First try to configure instant AEC from framework metadata
14043 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14044 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14045 }
14046
14047 // If framework did not set this value, try to read from set prop.
14048 if (val == 0) {
14049 memset(prop, 0, sizeof(prop));
14050 property_get("persist.camera.instant.aec", prop, "0");
14051 val = (uint8_t)atoi(prop);
14052 }
14053
14054 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14055 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14056 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14057 mInstantAEC = val;
14058 mInstantAECSettledFrameNumber = 0;
14059 mInstantAecFrameIdxCount = 0;
14060 LOGH("instantAEC value set %d",val);
14061 if (mInstantAEC) {
14062 memset(prop, 0, sizeof(prop));
14063 property_get("persist.camera.ae.instant.bound", prop, "10");
14064 int32_t aec_frame_skip_cnt = atoi(prop);
14065 if (aec_frame_skip_cnt >= 0) {
14066 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14067 } else {
14068 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14069 rc = BAD_VALUE;
14070 }
14071 }
14072 } else {
14073 LOGE("Bad instant aec value set %d", val);
14074 rc = BAD_VALUE;
14075 }
14076 return rc;
14077}
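/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * if the framework sends QCAMERA3_INSTANT_AEC_MODE = 1 (assumed here to be
 * the aggressive-convergence value of the vendor enum), that value is
 * written to CAM_INTF_PARM_INSTANT_AEC and up to
 * "persist.camera.ae.instant.bound" (default 10) frames are skipped for
 * display while AEC settles. A value of 0 falls back to the
 * "persist.camera.instant.aec" property.
 *==========================================================================*/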
14078
14079/*===========================================================================
14080 * FUNCTION : get_num_overall_buffers
14081 *
14082 * DESCRIPTION: Estimate number of pending buffers across all requests.
14083 *
14084 * PARAMETERS : None
14085 *
14086 * RETURN : Number of overall pending buffers
14087 *
14088 *==========================================================================*/
14089uint32_t PendingBuffersMap::get_num_overall_buffers()
14090{
14091 uint32_t sum_buffers = 0;
14092 for (auto &req : mPendingBuffersInRequest) {
14093 sum_buffers += req.mPendingBufferList.size();
14094 }
14095 return sum_buffers;
14096}
14097
14098/*===========================================================================
14099 * FUNCTION : removeBuf
14100 *
14101 * DESCRIPTION: Remove a matching buffer from tracker.
14102 *
14103 * PARAMETERS : @buffer: image buffer for the callback
14104 *
14105 * RETURN : None
14106 *
14107 *==========================================================================*/
14108void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14109{
14110 bool buffer_found = false;
14111 for (auto req = mPendingBuffersInRequest.begin();
14112 req != mPendingBuffersInRequest.end(); req++) {
14113 for (auto k = req->mPendingBufferList.begin();
14114 k != req->mPendingBufferList.end(); k++ ) {
14115 if (k->buffer == buffer) {
14116 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14117 req->frame_number, buffer);
14118 k = req->mPendingBufferList.erase(k);
14119 if (req->mPendingBufferList.empty()) {
14120 // Remove this request from Map
14121 req = mPendingBuffersInRequest.erase(req);
14122 }
14123 buffer_found = true;
14124 break;
14125 }
14126 }
14127 if (buffer_found) {
14128 break;
14129 }
14130 }
14131 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14132 get_num_overall_buffers());
14133}
14134
14135/*===========================================================================
14136 * FUNCTION : getBufErrStatus
14137 *
14138 * DESCRIPTION: get buffer error status
14139 *
14140 * PARAMETERS : @buffer: buffer handle
14141 *
14142 * RETURN : Error status
14143 *
14144 *==========================================================================*/
14145int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14146{
14147 for (auto& req : mPendingBuffersInRequest) {
14148 for (auto& k : req.mPendingBufferList) {
14149 if (k.buffer == buffer)
14150 return k.bufStatus;
14151 }
14152 }
14153 return CAMERA3_BUFFER_STATUS_OK;
14154}
14155
14156/*===========================================================================
14157 * FUNCTION : setPAAFSupport
14158 *
14159 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14160 * feature mask according to stream type and filter
14161 * arrangement
14162 *
14163 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14164 * @stream_type: stream type
14165 * @filter_arrangement: filter arrangement
14166 *
14167 * RETURN : None
14168 *==========================================================================*/
14169void QCamera3HardwareInterface::setPAAFSupport(
14170 cam_feature_mask_t& feature_mask,
14171 cam_stream_type_t stream_type,
14172 cam_color_filter_arrangement_t filter_arrangement)
14173{
14174 switch (filter_arrangement) {
14175 case CAM_FILTER_ARRANGEMENT_RGGB:
14176 case CAM_FILTER_ARRANGEMENT_GRBG:
14177 case CAM_FILTER_ARRANGEMENT_GBRG:
14178 case CAM_FILTER_ARRANGEMENT_BGGR:
14179 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14180 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
14181 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
14182 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14183 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14184 }
14185 break;
14186 case CAM_FILTER_ARRANGEMENT_Y:
14187 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14188 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14189 }
14190 break;
14191 default:
14192 break;
14193 }
14194 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14195 feature_mask, stream_type, filter_arrangement);
14196
14197
14198}
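/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * for a Bayer sensor (e.g. CAM_FILTER_ARRANGEMENT_RGGB) PAAF is flagged on
 * preview, analysis and video streams unless CAM_QTI_FEATURE_PPEISCORE is
 * already set for that stream; for a mono/Y sensor only the analysis stream
 * gets CAM_QCOM_FEATURE_PAAF.
 *==========================================================================*/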
14199
14200/*===========================================================================
14201* FUNCTION : getSensorMountAngle
14202*
14203* DESCRIPTION: Retrieve sensor mount angle
14204*
14205* PARAMETERS : None
14206*
14207* RETURN : sensor mount angle in uint32_t
14208*==========================================================================*/
14209uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14210{
14211 return gCamCapability[mCameraId]->sensor_mount_angle;
14212}
14213
14214/*===========================================================================
14215* FUNCTION : getRelatedCalibrationData
14216*
14217* DESCRIPTION: Retrieve related system calibration data
14218*
14219* PARAMETERS : None
14220*
14221* RETURN : Pointer of related system calibration data
14222*==========================================================================*/
14223const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14224{
14225 return (const cam_related_system_calibration_data_t *)
14226 &(gCamCapability[mCameraId]->related_cam_calibration);
14227}
14228
14229/*===========================================================================
14230 * FUNCTION : is60HzZone
14231 *
14232 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14233 *
14234 * PARAMETERS : None
14235 *
14236 * RETURN : True if in 60Hz zone, False otherwise
14237 *==========================================================================*/
14238bool QCamera3HardwareInterface::is60HzZone()
14239{
14240 time_t t = time(NULL);
14241 struct tm lt;
14242
14243 struct tm* r = localtime_r(&t, &lt);
14244
14245 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14246 return true;
14247 else
14248 return false;
14249}
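/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * the decision is a coarse UTC-offset heuristic: offsets strictly between
 * -2h and +8h are treated as 50Hz regions, everything else as 60Hz.
 *   tm_gmtoff = -5*60*60 (e.g. US East coast)   -> true  (60Hz)
 *   tm_gmtoff = +1*60*60 (e.g. central Europe)  -> false (50Hz)
 *   localtime_r() failure                       -> true  (60Hz default)
 *==========================================================================*/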
14250
14251/*===========================================================================
14252 * FUNCTION : adjustBlackLevelForCFA
14253 *
14254 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14255 * of bayer CFA (Color Filter Array).
14256 *
14257 * PARAMETERS : @input: black level pattern in the order of RGGB
14258 * @output: black level pattern in the order of CFA
14259 * @color_arrangement: CFA color arrangement
14260 *
14261 * RETURN : None
14262 *==========================================================================*/
14263template<typename T>
14264void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14265 T input[BLACK_LEVEL_PATTERN_CNT],
14266 T output[BLACK_LEVEL_PATTERN_CNT],
14267 cam_color_filter_arrangement_t color_arrangement)
14268{
14269 switch (color_arrangement) {
14270 case CAM_FILTER_ARRANGEMENT_GRBG:
14271 output[0] = input[1];
14272 output[1] = input[0];
14273 output[2] = input[3];
14274 output[3] = input[2];
14275 break;
14276 case CAM_FILTER_ARRANGEMENT_GBRG:
14277 output[0] = input[2];
14278 output[1] = input[3];
14279 output[2] = input[0];
14280 output[3] = input[1];
14281 break;
14282 case CAM_FILTER_ARRANGEMENT_BGGR:
14283 output[0] = input[3];
14284 output[1] = input[2];
14285 output[2] = input[1];
14286 output[3] = input[0];
14287 break;
14288 case CAM_FILTER_ARRANGEMENT_RGGB:
14289 output[0] = input[0];
14290 output[1] = input[1];
14291 output[2] = input[2];
14292 output[3] = input[3];
14293 break;
14294 default:
14295 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14296 break;
14297 }
14298}
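/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * input is ordered {R, Gr, Gb, B}; for a GRBG sensor the output follows the
 * CFA readout order:
 *
 *   float in[BLACK_LEVEL_PATTERN_CNT]  = {64.f, 65.f, 66.f, 67.f};
 *   float out[BLACK_LEVEL_PATTERN_CNT] = {};
 *   adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // out == {65.f, 64.f, 67.f, 66.f}, i.e. {Gr, R, B, Gb}
 *==========================================================================*/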
14299
14300void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14301 CameraMetadata &resultMetadata,
14302 std::shared_ptr<metadata_buffer_t> settings)
14303{
14304 if (settings == nullptr) {
14305 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14306 return;
14307 }
14308
14309 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14310 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14311 }
14312
14313 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14314 String8 str((const char *)gps_methods);
14315 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14316 }
14317
14318 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14319 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14320 }
14321
14322 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14323 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14324 }
14325
14326 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14327 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14328 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14329 }
14330
14331 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14332 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14333 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14334 }
14335
14336 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14337 int32_t fwk_thumb_size[2];
14338 fwk_thumb_size[0] = thumb_size->width;
14339 fwk_thumb_size[1] = thumb_size->height;
14340 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14341 }
14342
14343 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14344 uint8_t fwk_intent = intent[0];
14345 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14346 }
14347}
14348
14349bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14350 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14351 const CameraMetadata &metadata)
14352{
14353 if (hdrPlusRequest == nullptr) return false;
14354
14355 // Check noise reduction mode is high quality.
14356 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14357 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14358 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
14359 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14360 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
14361 return false;
14362 }
14363
14364 // Check edge mode is high quality.
14365 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14366 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14367 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14368 return false;
14369 }
14370
14371 if (request.num_output_buffers != 1 ||
14372 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14373 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
14374 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14375 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14376 request.output_buffers[0].stream->width,
14377 request.output_buffers[0].stream->height,
14378 request.output_buffers[0].stream->format);
14379 }
14380 return false;
14381 }
14382
14383 // Get a YUV buffer from pic channel.
14384 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14385 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14386 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14387 if (res != OK) {
14388 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14389 __FUNCTION__, strerror(-res), res);
14390 return false;
14391 }
14392
14393 pbcamera::StreamBuffer buffer;
14394 buffer.streamId = kPbYuvOutputStreamId;
14395 buffer.dmaBufFd = yuvBuffer->fd;
14396 buffer.data = yuvBuffer->buffer;
14397 buffer.dataSize = yuvBuffer->frame_len;
14398
14399 pbcamera::CaptureRequest pbRequest;
14400 pbRequest.id = request.frame_number;
14401 pbRequest.outputBuffers.push_back(buffer);
14402
14403 // Submit an HDR+ capture request to HDR+ service.
14404 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
14405 if (res != OK) {
14406 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14407 strerror(-res), res);
14408 return false;
14409 }
14410
14411 hdrPlusRequest->yuvBuffer = yuvBuffer;
14412 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14413
14414 return true;
14415}
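/*===========================================================================
 * EDITOR'S EXAMPLE (illustrative sketch, not part of the HAL build):
 * a capture request is routed to HDR+ only when all of the following hold:
 *   - ANDROID_NOISE_REDUCTION_MODE == HIGH_QUALITY
 *   - ANDROID_EDGE_MODE            == HIGH_QUALITY
 *   - exactly one output buffer, and it targets a BLOB (JPEG) stream
 *   - a YUV buffer is available from the pic channel and the capture
 *     request is accepted by the HDR+ service
 * Otherwise the request falls through to the regular capture path.
 *==========================================================================*/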
14416
14417status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14418 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14419 return OK;
14420 }
14421
14422 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14423 if (res != OK) {
14424 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14425 strerror(-res), res);
14426 return res;
14427 }
14428 gHdrPlusClientOpening = true;
14429
14430 return OK;
14431}
14432
14433status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14434{
14435 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014436
14437 // Check if gHdrPlusClient is opened or being opened.
14438 if (gHdrPlusClient == nullptr) {
14439 if (gHdrPlusClientOpening) {
14440 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14441 return OK;
14442 }
14443
14444 res = openHdrPlusClientAsyncLocked();
14445 if (res != OK) {
14446 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14447 strerror(-res), res);
14448 return res;
14449 }
14450
14451 // When opening HDR+ client completes, HDR+ mode will be enabled.
14452 return OK;
14453
14454 }
14455
14456 // Configure stream for HDR+.
14457 res = configureHdrPlusStreamsLocked();
14458 if (res != OK) {
14459 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
14460 return res;
14461 }
14462
14463 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14464 res = gHdrPlusClient->setZslHdrPlusMode(true);
14465 if (res != OK) {
14466 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14467 return res;
14468 }
14469
14470 mHdrPlusModeEnabled = true;
14471 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14472
14473 return OK;
14474}
14475
14476void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14477{
14478 // Disable HDR+ mode.
14479 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
14480 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14481 if (res != OK) {
14482 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14483 }
14484
14485 // Close HDR+ client so Easel can enter low power mode.
14486 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14487 gHdrPlusClient = nullptr;
14488 }
14489
14490 mHdrPlusModeEnabled = false;
14491 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14492}
14493
14494status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
14495{
14496 pbcamera::InputConfiguration inputConfig;
14497 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14498 status_t res = OK;
14499
14500 // Configure HDR+ client streams.
14501 // Get input config.
14502 if (mHdrPlusRawSrcChannel) {
14503 // HDR+ input buffers will be provided by HAL.
14504 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14505 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14506 if (res != OK) {
14507 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14508 __FUNCTION__, strerror(-res), res);
14509 return res;
14510 }
14511
14512 inputConfig.isSensorInput = false;
14513 } else {
14514 // Sensor MIPI will send data to Easel.
14515 inputConfig.isSensorInput = true;
14516 inputConfig.sensorMode.cameraId = mCameraId;
14517 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14518 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14519 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14520 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14521 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14522 if (mSensorModeInfo.num_raw_bits != 10) {
14523 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14524 mSensorModeInfo.num_raw_bits);
14525 return BAD_VALUE;
14526 }
14527
14528 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
14529 }
14530
14531 // Get output configurations.
14532 // Easel may need to output RAW16 buffers if mRawChannel was created.
14533 // TODO: handle RAW16 outputs.
14534
14535 // Easel may need to output YUV output buffers if mPictureChannel was created.
14536 pbcamera::StreamConfiguration yuvOutputConfig;
14537 if (mPictureChannel != nullptr) {
14538 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14539 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14540 if (res != OK) {
14541 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14542 __FUNCTION__, strerror(-res), res);
14543
14544 return res;
14545 }
14546
14547 outputStreamConfigs.push_back(yuvOutputConfig);
14548 }
14549
14550 // TODO: consider other channels for YUV output buffers.
14551
14552 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
14553 if (res != OK) {
14554 LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14555 strerror(-res), res);
14556 return res;
14557 }
14558
14559 return OK;
14560}
14561
14562void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14563 if (client == nullptr) {
14564 ALOGE("%s: Opened client is null.", __FUNCTION__);
14565 return;
14566 }
14567
14568 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14569
14570 Mutex::Autolock l(gHdrPlusClientLock);
14571 gHdrPlusClient = std::move(client);
14572 gHdrPlusClientOpening = false;
14573
14574 // Set static metadata.
14575 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14576 if (res != OK) {
14577 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14578 __FUNCTION__, strerror(-res), res);
14579 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14580 gHdrPlusClient = nullptr;
14581 return;
14582 }
14583
14584 // Enable HDR+ mode.
14585 res = enableHdrPlusModeLocked();
14586 if (res != OK) {
14587 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14588 }
14589}
14590
14591void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14592 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14593 Mutex::Autolock l(gHdrPlusClientLock);
14594 gHdrPlusClientOpening = false;
14595}
14596
14597void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14598 const camera_metadata_t &resultMetadata) {
14599 if (result != nullptr) {
14600 if (result->outputBuffers.size() != 1) {
14601 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14602 result->outputBuffers.size());
14603 return;
14604 }
14605
14606 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14607 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14608 result->outputBuffers[0].streamId);
14609 return;
14610 }
14611
14612 // Find the pending HDR+ request.
14613 HdrPlusPendingRequest pendingRequest;
14614 {
14615 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14616 auto req = mHdrPlusPendingRequests.find(result->requestId);
14617 pendingRequest = req->second;
14618 }
14619
14620 // Update the result metadata with the settings of the HDR+ still capture request because
14621 // the result metadata belongs to a ZSL buffer.
14622 CameraMetadata metadata;
14623 metadata = &resultMetadata;
14624 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14625 camera_metadata_t* updatedResultMetadata = metadata.release();
14626
14627 QCamera3PicChannel *picChannel =
14628 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14629
14630 // Check if dumping HDR+ YUV output is enabled.
14631 char prop[PROPERTY_VALUE_MAX];
14632 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14633 bool dumpYuvOutput = atoi(prop);
14634
14635 if (dumpYuvOutput) {
14636 // Dump yuv buffer to a ppm file.
14637 pbcamera::StreamConfiguration outputConfig;
14638 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14639 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14640 if (rc == OK) {
14641 char buf[FILENAME_MAX] = {};
14642 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14643 result->requestId, result->outputBuffers[0].streamId,
14644 outputConfig.image.width, outputConfig.image.height);
14645
14646 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14647 } else {
14648 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14649 __FUNCTION__, strerror(-rc), rc);
14650 }
14651 }
14652
14653 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14654 auto halMetadata = std::make_shared<metadata_buffer_t>();
14655 clear_metadata_buffer(halMetadata.get());
14656
14657 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14658 // encoding.
14659 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14660 halStreamId, /*minFrameDuration*/0);
14661 if (res == OK) {
14662 // Return the buffer to pic channel for encoding.
14663 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14664 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14665 halMetadata);
14666 } else {
14667 // Return the buffer without encoding.
14668 // TODO: This should not happen but we may want to report an error buffer to camera
14669 // service.
14670 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14671 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14672 strerror(-res), res);
14673 }
14674
14675 // Send HDR+ metadata to framework.
14676 {
14677 pthread_mutex_lock(&mMutex);
14678
14679 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14680 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14681 pthread_mutex_unlock(&mMutex);
14682 }
14683
14684 // Remove the HDR+ pending request.
14685 {
14686 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14687 auto req = mHdrPlusPendingRequests.find(result->requestId);
14688 mHdrPlusPendingRequests.erase(req);
14689 }
14690 }
14691}
14692
14693void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14694 // TODO: Handle HDR+ capture failures and send the failure to framework.
14695 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14696 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14697
14698 // Return the buffer to pic channel.
14699 QCamera3PicChannel *picChannel =
14700 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14701 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14702
14703 mHdrPlusPendingRequests.erase(pendingRequest);
14704}
14705
14706}; //end namespace qcamera