/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

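// Available JPEG thumbnail sizes, listed as (width, height) pairs.
// The leading (0, 0) entry indicates that thumbnail generation can be disabled.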
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

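// Map of supported high-frame-rate (HFR) video fps values to the corresponding HAL HFR modes.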
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

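// camera3_device_ops_t entry points exposed to the camera framework.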
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

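/*===========================================================================
 * FUNCTION   : logEaselEvent
 *
 * DESCRIPTION: Log an Easel event with a CLOCK_BOOTTIME timestamp (in ms)
 *              when Easel profiling (gEaselProfilingEnabled) is enabled
 *
 * PARAMETERS :
 *   @tag   : log tag string
 *   @event : event description string
 *
 * RETURN     : none
 *==========================================================================*/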
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
        &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream dimensions are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001319/*===========================================================================
1320 * FUNCTION : validateUsageFlags
1321 *
1322 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1323 *
1324 * PARAMETERS :
1325 * @stream_list : streams to be configured
1326 *
1327 * RETURN :
1328 * NO_ERROR if the usage flags are supported
1329 * error code if usage flags are not supported
1330 *
1331 *==========================================================================*/
1332int QCamera3HardwareInterface::validateUsageFlags(
1333 const camera3_stream_configuration_t* streamList)
1334{
1335 for (size_t j = 0; j < streamList->num_streams; j++) {
1336 const camera3_stream_t *newStream = streamList->streams[j];
1337
1338 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1339 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1340 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1341 continue;
1342 }
1343
1344 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1345 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1346 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1347 bool forcePreviewUBWC = true;
1348 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1349 forcePreviewUBWC = false;
1350 }
1351 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1352 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1353 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1354 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1355 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1356 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1357
1358 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1359 // So color spaces will always match.
1360
1361 // Check whether underlying formats of shared streams match.
1362 if (isVideo && isPreview && videoFormat != previewFormat) {
1363 LOGE("Combined video and preview usage flag is not supported");
1364 return -EINVAL;
1365 }
1366 if (isPreview && isZSL && previewFormat != zslFormat) {
1367 LOGE("Combined preview and zsl usage flag is not supported");
1368 return -EINVAL;
1369 }
1370 if (isVideo && isZSL && videoFormat != zslFormat) {
1371 LOGE("Combined video and zsl usage flag is not supported");
1372 return -EINVAL;
1373 }
1374 }
1375 return NO_ERROR;
1376}
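/* Illustrative example: a single IMPLEMENTATION_DEFINED surface flagged for
 * both preview and video can only be backed by one buffer format, so if the
 * preview and video defaults returned by getStreamDefaultFormat() resolve to
 * different cam_format_t values, the configuration is rejected above.
 */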
1377
1378/*===========================================================================
1379 * FUNCTION : validateUsageFlagsForEis
1380 *
1381 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1382 *
1383 * PARAMETERS :
1384 * @stream_list : streams to be configured
1385 *
1386 * RETURN :
1387 * NO_ERROR if the usage flags are supported
1388 * error code if usage flags are not supported
1389 *
1390 *==========================================================================*/
1391int QCamera3HardwareInterface::validateUsageFlagsForEis(
1392 const camera3_stream_configuration_t* streamList)
1393{
1394 for (size_t j = 0; j < streamList->num_streams; j++) {
1395 const camera3_stream_t *newStream = streamList->streams[j];
1396
1397 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1398 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1399
1400 // Because EIS is "hard-coded" for certain use cases, and the current
1401 // implementation doesn't support shared preview and video on the same
1402 // stream, return failure if EIS is forced on.
1403 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1404 LOGE("Combined video and preview usage flag is not supported due to EIS");
1405 return -EINVAL;
1406 }
1407 }
1408 return NO_ERROR;
1409}
1410
Thierry Strudel3d639192016-09-09 11:52:26 -07001411/*==============================================================================
1412 * FUNCTION : isSupportChannelNeeded
1413 *
1414 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1415 *
1416 * PARAMETERS :
1417 * @stream_list : streams to be configured
1418 * @stream_config_info : the config info for streams to be configured
1419 *
1420 * RETURN : Boolean true/false decision
1421 *
1422 *==========================================================================*/
1423bool QCamera3HardwareInterface::isSupportChannelNeeded(
1424 camera3_stream_configuration_t *streamList,
1425 cam_stream_size_info_t stream_config_info)
1426{
1427 uint32_t i;
1428 bool pprocRequested = false;
1429 /* Check for conditions where PProc pipeline does not have any streams*/
1430 for (i = 0; i < stream_config_info.num_streams; i++) {
1431 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1432 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1433 pprocRequested = true;
1434 break;
1435 }
1436 }
1437
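 /* No stream exercises the PProc pipeline, so a support channel is needed */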
1438 if (pprocRequested == false )
1439 return true;
1440
1441 /* Dummy stream needed if only raw or jpeg streams present */
1442 for (i = 0; i < streamList->num_streams; i++) {
1443 switch(streamList->streams[i]->format) {
1444 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1445 case HAL_PIXEL_FORMAT_RAW10:
1446 case HAL_PIXEL_FORMAT_RAW16:
1447 case HAL_PIXEL_FORMAT_BLOB:
1448 break;
1449 default:
1450 return false;
1451 }
1452 }
1453 return true;
1454}
1455
1456/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001457 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001458 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001459 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001460 *
1461 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001462 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001463 *
1464 * RETURN : int32_t type of status
1465 * NO_ERROR -- success
1466 * non-zero failure code
1467 *
1468 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001469int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001470{
1471 int32_t rc = NO_ERROR;
1472
1473 cam_dimension_t max_dim = {0, 0};
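 /* Take the union of all configured stream sizes: the selected sensor mode
 * must cover the widest and the tallest stream independently */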
1474 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1475 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1476 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1477 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1478 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1479 }
1480
1481 clear_metadata_buffer(mParameters);
1482
1483 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1484 max_dim);
1485 if (rc != NO_ERROR) {
1486 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1487 return rc;
1488 }
1489
1490 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1491 if (rc != NO_ERROR) {
1492 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1493 return rc;
1494 }
1495
1496 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001497 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001498
1499 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1500 mParameters);
1501 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001502 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001503 return rc;
1504 }
1505
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001506 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001507 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1508 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1509 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1510 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1511 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001512
1513 return rc;
1514}
1515
1516/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 * FUNCTION : addToPPFeatureMask
1518 *
1519 * DESCRIPTION: add additional features to pp feature mask based on
1520 * stream type and use case
1521 *
1522 * PARAMETERS :
1523 * @stream_format : stream type for feature mask
1524 * @stream_idx : stream idx within postprocess_mask list to change
1525 *
1526 * RETURN : None
1527 *
1528 *==========================================================================*/
1529void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1530 uint32_t stream_idx)
1531{
1532 char feature_mask_value[PROPERTY_VALUE_MAX];
1533 cam_feature_mask_t feature_mask;
1534 int args_converted;
1535 int property_len;
1536
1537 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001538#ifdef _LE_CAMERA_
1539 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1540 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1541 property_len = property_get("persist.camera.hal3.feature",
1542 feature_mask_value, swtnr_feature_mask_value);
1543#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001544 property_len = property_get("persist.camera.hal3.feature",
1545 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001546#endif
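 /* The mask may be given in hex (with a "0x" prefix) or in decimal.
 * Illustrative override via adb (the value is a cam_feature_mask_t bit
 * pattern such as CAM_QTI_FEATURE_SW_TNR or CAM_QCOM_FEATURE_LLVD):
 *     adb shell setprop persist.camera.hal3.feature <mask>
 */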
Thierry Strudel3d639192016-09-09 11:52:26 -07001547 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1548 (feature_mask_value[1] == 'x')) {
1549 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1550 } else {
1551 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1552 }
1553 if (1 != args_converted) {
1554 feature_mask = 0;
1555 LOGE("Wrong feature mask %s", feature_mask_value);
1556 return;
1557 }
1558
1559 switch (stream_format) {
1560 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1561 /* Add LLVD to pp feature mask only if video hint is enabled */
1562 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1563 mStreamConfigInfo.postprocess_mask[stream_idx]
1564 |= CAM_QTI_FEATURE_SW_TNR;
1565 LOGH("Added SW TNR to pp feature mask");
1566 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1567 mStreamConfigInfo.postprocess_mask[stream_idx]
1568 |= CAM_QCOM_FEATURE_LLVD;
1569 LOGH("Added LLVD SeeMore to pp feature mask");
1570 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001571 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1572 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1573 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1574 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001575 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1576 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1577 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1578 CAM_QTI_FEATURE_BINNING_CORRECTION;
1579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001580 break;
1581 }
1582 default:
1583 break;
1584 }
1585 LOGD("PP feature mask %llx",
1586 mStreamConfigInfo.postprocess_mask[stream_idx]);
1587}
1588
1589/*==============================================================================
1590 * FUNCTION : updateFpsInPreviewBuffer
1591 *
1592 * DESCRIPTION: update FPS information in preview buffer.
1593 *
1594 * PARAMETERS :
1595 * @metadata : pointer to metadata buffer
1596 * @frame_number: frame_number to look for in pending buffer list
1597 *
1598 * RETURN : None
1599 *
1600 *==========================================================================*/
1601void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1602 uint32_t frame_number)
1603{
1604 // Mark all pending buffers for this particular request
1605 // with corresponding framerate information
1606 for (List<PendingBuffersInRequest>::iterator req =
1607 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1608 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1609 for(List<PendingBufferInfo>::iterator j =
1610 req->mPendingBufferList.begin();
1611 j != req->mPendingBufferList.end(); j++) {
1612 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1613 if ((req->frame_number == frame_number) &&
1614 (channel->getStreamTypeMask() &
1615 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1616 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1617 CAM_INTF_PARM_FPS_RANGE, metadata) {
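 // Propagate the request's max fps to the display stack via the
 // gralloc private handle (UPDATE_REFRESH_RATE).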
1618 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1619 struct private_handle_t *priv_handle =
1620 (struct private_handle_t *)(*(j->buffer));
1621 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1622 }
1623 }
1624 }
1625 }
1626}
1627
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001628/*==============================================================================
1629 * FUNCTION : updateTimeStampInPendingBuffers
1630 *
1631 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1632 * of a frame number
1633 *
1634 * PARAMETERS :
1635 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1636 * @timestamp : timestamp to be set
1637 *
1638 * RETURN : None
1639 *
1640 *==========================================================================*/
1641void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1642 uint32_t frameNumber, nsecs_t timestamp)
1643{
1644 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1645 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1646 if (req->frame_number != frameNumber)
1647 continue;
1648
1649 for (auto k = req->mPendingBufferList.begin();
1650 k != req->mPendingBufferList.end(); k++ ) {
1651 struct private_handle_t *priv_handle =
1652 (struct private_handle_t *) (*(k->buffer));
1653 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1654 }
1655 }
1656 return;
1657}
1658
Thierry Strudel3d639192016-09-09 11:52:26 -07001659/*===========================================================================
1660 * FUNCTION : configureStreams
1661 *
1662 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1663 * and output streams.
1664 *
1665 * PARAMETERS :
1666 * @stream_list : streams to be configured
1667 *
1668 * RETURN : int32_t type of status (NO_ERROR on success, non-zero failure code otherwise)
1669 *
1670 *==========================================================================*/
1671int QCamera3HardwareInterface::configureStreams(
1672 camera3_stream_configuration_t *streamList)
1673{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001674 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001675 int rc = 0;
1676
1677 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001678 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001679 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001680 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001681
1682 return rc;
1683}
1684
1685/*===========================================================================
1686 * FUNCTION : configureStreamsPerfLocked
1687 *
1688 * DESCRIPTION: configureStreams while perfLock is held.
1689 *
1690 * PARAMETERS :
1691 * @stream_list : streams to be configured
1692 *
1693 * RETURN : int32_t type of status
1694 * NO_ERROR -- success
1695 * non-zero failure code
1696 *==========================================================================*/
1697int QCamera3HardwareInterface::configureStreamsPerfLocked(
1698 camera3_stream_configuration_t *streamList)
1699{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001700 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001701 int rc = 0;
1702
1703 // Sanity check stream_list
1704 if (streamList == NULL) {
1705 LOGE("NULL stream configuration");
1706 return BAD_VALUE;
1707 }
1708 if (streamList->streams == NULL) {
1709 LOGE("NULL stream list");
1710 return BAD_VALUE;
1711 }
1712
1713 if (streamList->num_streams < 1) {
1714 LOGE("Bad number of streams requested: %d",
1715 streamList->num_streams);
1716 return BAD_VALUE;
1717 }
1718
1719 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1720 LOGE("Maximum number of streams %d exceeded: %d",
1721 MAX_NUM_STREAMS, streamList->num_streams);
1722 return BAD_VALUE;
1723 }
1724
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001725 rc = validateUsageFlags(streamList);
1726 if (rc != NO_ERROR) {
1727 return rc;
1728 }
1729
Thierry Strudel3d639192016-09-09 11:52:26 -07001730 mOpMode = streamList->operation_mode;
1731 LOGD("mOpMode: %d", mOpMode);
1732
1733 /* First invalidate all the streams in mStreamInfo;
1734 * if they appear again, they will be re-validated */
1735 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1736 it != mStreamInfo.end(); it++) {
1737 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1738 if (channel) {
1739 channel->stop();
1740 }
1741 (*it)->status = INVALID;
1742 }
1743
1744 if (mRawDumpChannel) {
1745 mRawDumpChannel->stop();
1746 delete mRawDumpChannel;
1747 mRawDumpChannel = NULL;
1748 }
1749
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001750 if (mHdrPlusRawSrcChannel) {
1751 mHdrPlusRawSrcChannel->stop();
1752 delete mHdrPlusRawSrcChannel;
1753 mHdrPlusRawSrcChannel = NULL;
1754 }
1755
Thierry Strudel3d639192016-09-09 11:52:26 -07001756 if (mSupportChannel)
1757 mSupportChannel->stop();
1758
1759 if (mAnalysisChannel) {
1760 mAnalysisChannel->stop();
1761 }
1762 if (mMetadataChannel) {
1763 /* If mStreamInfo is not empty, a metadata stream exists */
1764 mMetadataChannel->stop();
1765 }
1766 if (mChannelHandle) {
1767 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1768 mChannelHandle);
1769 LOGD("stopping channel %d", mChannelHandle);
1770 }
1771
1772 pthread_mutex_lock(&mMutex);
1773
1774 // Check state
1775 switch (mState) {
1776 case INITIALIZED:
1777 case CONFIGURED:
1778 case STARTED:
1779 /* valid state */
1780 break;
1781 default:
1782 LOGE("Invalid state %d", mState);
1783 pthread_mutex_unlock(&mMutex);
1784 return -ENODEV;
1785 }
1786
1787 /* Check whether we have video stream */
1788 m_bIs4KVideo = false;
1789 m_bIsVideo = false;
1790 m_bEisSupportedSize = false;
1791 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001792 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001793 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001794 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001795 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001796 uint32_t videoWidth = 0U;
1797 uint32_t videoHeight = 0U;
1798 size_t rawStreamCnt = 0;
1799 size_t stallStreamCnt = 0;
1800 size_t processedStreamCnt = 0;
1801 // Number of streams on ISP encoder path
1802 size_t numStreamsOnEncoder = 0;
1803 size_t numYuv888OnEncoder = 0;
1804 bool bYuv888OverrideJpeg = false;
1805 cam_dimension_t largeYuv888Size = {0, 0};
1806 cam_dimension_t maxViewfinderSize = {0, 0};
1807 bool bJpegExceeds4K = false;
1808 bool bJpegOnEncoder = false;
1809 bool bUseCommonFeatureMask = false;
1810 cam_feature_mask_t commonFeatureMask = 0;
1811 bool bSmallJpegSize = false;
1812 uint32_t width_ratio;
1813 uint32_t height_ratio;
1814 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1815 camera3_stream_t *inputStream = NULL;
1816 bool isJpeg = false;
1817 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001818 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001819 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001820
1821 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1822
1823 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 uint8_t eis_prop_set;
1825 uint32_t maxEisWidth = 0;
1826 uint32_t maxEisHeight = 0;
1827
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001828 // Initialize all instant AEC related variables
1829 mInstantAEC = false;
1830 mResetInstantAEC = false;
1831 mInstantAECSettledFrameNumber = 0;
1832 mAecSkipDisplayFrameBound = 0;
1833 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001834 mCurrFeatureState = 0;
1835 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001836
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1838
1839 size_t count = IS_TYPE_MAX;
1840 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1841 for (size_t i = 0; i < count; i++) {
1842 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001843 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1844 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 break;
1846 }
1847 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001848
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001849 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001850 maxEisWidth = MAX_EIS_WIDTH;
1851 maxEisHeight = MAX_EIS_HEIGHT;
1852 }
1853
1854 /* EIS setprop control */
1855 char eis_prop[PROPERTY_VALUE_MAX];
1856 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001857 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001858 eis_prop_set = (uint8_t)atoi(eis_prop);
1859
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001860 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1862
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001863 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1864 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001865
Thierry Strudel3d639192016-09-09 11:52:26 -07001866 /* stream configurations */
1867 for (size_t i = 0; i < streamList->num_streams; i++) {
1868 camera3_stream_t *newStream = streamList->streams[i];
1869 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1870 "height = %d, rotation = %d, usage = 0x%x",
1871 i, newStream->stream_type, newStream->format,
1872 newStream->width, newStream->height, newStream->rotation,
1873 newStream->usage);
1874 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1875 newStream->stream_type == CAMERA3_STREAM_INPUT){
1876 isZsl = true;
1877 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001878 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1879 IS_USAGE_PREVIEW(newStream->usage)) {
1880 isPreview = true;
1881 }
1882
Thierry Strudel3d639192016-09-09 11:52:26 -07001883 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1884 inputStream = newStream;
1885 }
1886
Emilian Peev7650c122017-01-19 08:24:33 -08001887 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1888 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 isJpeg = true;
1890 jpegSize.width = newStream->width;
1891 jpegSize.height = newStream->height;
1892 if (newStream->width > VIDEO_4K_WIDTH ||
1893 newStream->height > VIDEO_4K_HEIGHT)
1894 bJpegExceeds4K = true;
1895 }
1896
1897 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1898 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1899 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001900 // In HAL3 we can have multiple different video streams.
1901 // The variables video width and height are used below as
1902 // dimensions of the biggest of them
1903 if (videoWidth < newStream->width ||
1904 videoHeight < newStream->height) {
1905 videoWidth = newStream->width;
1906 videoHeight = newStream->height;
1907 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001908 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1909 (VIDEO_4K_HEIGHT <= newStream->height)) {
1910 m_bIs4KVideo = true;
1911 }
1912 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1913 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001914
Thierry Strudel3d639192016-09-09 11:52:26 -07001915 }
1916 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1917 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1918 switch (newStream->format) {
1919 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001920 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1921 depthPresent = true;
1922 break;
1923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001924 stallStreamCnt++;
1925 if (isOnEncoder(maxViewfinderSize, newStream->width,
1926 newStream->height)) {
1927 numStreamsOnEncoder++;
1928 bJpegOnEncoder = true;
1929 }
1930 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1931 newStream->width);
1932 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1933 newStream->height);
1934 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1935 "FATAL: max_downscale_factor cannot be zero and so assert");
1936 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1937 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1938 LOGH("Setting small jpeg size flag to true");
1939 bSmallJpegSize = true;
1940 }
1941 break;
1942 case HAL_PIXEL_FORMAT_RAW10:
1943 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1944 case HAL_PIXEL_FORMAT_RAW16:
1945 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001946 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1947 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1948 pdStatCount++;
1949 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001950 break;
1951 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1952 processedStreamCnt++;
1953 if (isOnEncoder(maxViewfinderSize, newStream->width,
1954 newStream->height)) {
1955 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1956 !IS_USAGE_ZSL(newStream->usage)) {
1957 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1958 }
1959 numStreamsOnEncoder++;
1960 }
1961 break;
1962 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1963 processedStreamCnt++;
1964 if (isOnEncoder(maxViewfinderSize, newStream->width,
1965 newStream->height)) {
1966 // If Yuv888 size is not greater than 4K, set feature mask
1967 // to SUPERSET so that it support concurrent request on
1968 // YUV and JPEG.
1969 if (newStream->width <= VIDEO_4K_WIDTH &&
1970 newStream->height <= VIDEO_4K_HEIGHT) {
1971 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1972 }
1973 numStreamsOnEncoder++;
1974 numYuv888OnEncoder++;
1975 largeYuv888Size.width = newStream->width;
1976 largeYuv888Size.height = newStream->height;
1977 }
1978 break;
1979 default:
1980 processedStreamCnt++;
1981 if (isOnEncoder(maxViewfinderSize, newStream->width,
1982 newStream->height)) {
1983 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1984 numStreamsOnEncoder++;
1985 }
1986 break;
1987 }
1988
1989 }
1990 }
1991
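 // EIS applies only to back-camera video use cases: disable it for front and
 // front-aux sensors and for configurations without a video stream.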
1992 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1993 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1994 !m_bIsVideo) {
1995 m_bEisEnable = false;
1996 }
1997
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001998 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1999 pthread_mutex_unlock(&mMutex);
2000 return -EINVAL;
2001 }
2002
Thierry Strudel54dc9782017-02-15 12:12:10 -08002003 uint8_t forceEnableTnr = 0;
2004 char tnr_prop[PROPERTY_VALUE_MAX];
2005 memset(tnr_prop, 0, sizeof(tnr_prop));
2006 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2007 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Logic to enable/disable TNR based on specific config size/etc.*/
2010 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002011 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2012 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002013 else if (forceEnableTnr)
2014 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002015
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002016 char videoHdrProp[PROPERTY_VALUE_MAX];
2017 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2018 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2019 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2020
2021 if (hdr_mode_prop == 1 && m_bIsVideo &&
2022 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2023 m_bVideoHdrEnabled = true;
2024 else
2025 m_bVideoHdrEnabled = false;
2026
2027
Thierry Strudel3d639192016-09-09 11:52:26 -07002028 /* Check if num_streams is sane */
2029 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2030 rawStreamCnt > MAX_RAW_STREAMS ||
2031 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2032 LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2033 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2034 pthread_mutex_unlock(&mMutex);
2035 return -EINVAL;
2036 }
2037 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002038 if (isZsl && m_bIs4KVideo) {
2039 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002040 pthread_mutex_unlock(&mMutex);
2041 return -EINVAL;
2042 }
2043 /* Check if stream sizes are sane */
2044 if (numStreamsOnEncoder > 2) {
2045 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2046 pthread_mutex_unlock(&mMutex);
2047 return -EINVAL;
2048 } else if (1 < numStreamsOnEncoder){
2049 bUseCommonFeatureMask = true;
2050 LOGH("Multiple streams above max viewfinder size, common mask needed");
2051 }
2052
2053 /* Check if BLOB size is greater than 4k in 4k recording case */
2054 if (m_bIs4KVideo && bJpegExceeds4K) {
2055 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2056 pthread_mutex_unlock(&mMutex);
2057 return -EINVAL;
2058 }
2059
Emilian Peev7650c122017-01-19 08:24:33 -08002060 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2061 depthPresent) {
2062 LOGE("HAL doesn't support depth streams in HFR mode!");
2063 pthread_mutex_unlock(&mMutex);
2064 return -EINVAL;
2065 }
2066
Thierry Strudel3d639192016-09-09 11:52:26 -07002067 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2068 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2069 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2070 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2071 // configurations:
2072 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2073 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2074 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2075 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2076 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2077 __func__);
2078 pthread_mutex_unlock(&mMutex);
2079 return -EINVAL;
2080 }
2081
2082 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2083 // the YUV stream's size is greater or equal to the JPEG size, set common
2084 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2085 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2086 jpegSize.width, jpegSize.height) &&
2087 largeYuv888Size.width > jpegSize.width &&
2088 largeYuv888Size.height > jpegSize.height) {
2089 bYuv888OverrideJpeg = true;
2090 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2091 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2092 }
2093
2094 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2095 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2096 commonFeatureMask);
2097 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2098 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2099
2100 rc = validateStreamDimensions(streamList);
2101 if (rc == NO_ERROR) {
2102 rc = validateStreamRotations(streamList);
2103 }
2104 if (rc != NO_ERROR) {
2105 LOGE("Invalid stream configuration requested!");
2106 pthread_mutex_unlock(&mMutex);
2107 return rc;
2108 }
2109
Emilian Peev0f3c3162017-03-15 12:57:46 +00002110 if (1 < pdStatCount) {
2111 LOGE("HAL doesn't support multiple PD streams");
2112 pthread_mutex_unlock(&mMutex);
2113 return -EINVAL;
2114 }
2115
2116 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2117 (1 == pdStatCount)) {
2118 LOGE("HAL doesn't support PD streams in HFR mode!");
2119 pthread_mutex_unlock(&mMutex);
2120 return -EINVAL;
2121 }
2122
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2124 for (size_t i = 0; i < streamList->num_streams; i++) {
2125 camera3_stream_t *newStream = streamList->streams[i];
2126 LOGH("newStream type = %d, stream format = %d "
2127 "stream size : %d x %d, stream rotation = %d",
2128 newStream->stream_type, newStream->format,
2129 newStream->width, newStream->height, newStream->rotation);
2130 // If the stream already exists in mStreamInfo, mark it valid again
2131 bool stream_exists = false;
2132 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2133 it != mStreamInfo.end(); it++) {
2134 if ((*it)->stream == newStream) {
2135 QCamera3ProcessingChannel *channel =
2136 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2137 stream_exists = true;
2138 if (channel)
2139 delete channel;
2140 (*it)->status = VALID;
2141 (*it)->stream->priv = NULL;
2142 (*it)->channel = NULL;
2143 }
2144 }
2145 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2146 //new stream
2147 stream_info_t* stream_info;
2148 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2149 if (!stream_info) {
2150 LOGE("Could not allocate stream info");
2151 rc = -ENOMEM;
2152 pthread_mutex_unlock(&mMutex);
2153 return rc;
2154 }
2155 stream_info->stream = newStream;
2156 stream_info->status = VALID;
2157 stream_info->channel = NULL;
2158 mStreamInfo.push_back(stream_info);
2159 }
2160 /* Covers Opaque ZSL and API1 F/W ZSL */
2161 if (IS_USAGE_ZSL(newStream->usage)
2162 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2163 if (zslStream != NULL) {
2164 LOGE("Multiple input/reprocess streams requested!");
2165 pthread_mutex_unlock(&mMutex);
2166 return BAD_VALUE;
2167 }
2168 zslStream = newStream;
2169 }
2170 /* Covers YUV reprocess */
2171 if (inputStream != NULL) {
2172 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2173 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2174 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2175 && inputStream->width == newStream->width
2176 && inputStream->height == newStream->height) {
2177 if (zslStream != NULL) {
2178 /* This scenario indicates that multiple YUV streams with the same
2179 * size as the input stream have been requested. Since the zsl stream
2180 * handle is used solely to override the size of streams that share
2181 * h/w streams, we just make a guess here as to which of the streams
2182 * is the ZSL stream. This will be refactored once generic logic for
2183 * streams sharing encoder output is in place.
2184 */
2185 LOGH("Warning, Multiple input/reprocess streams requested!");
2186 }
2187 zslStream = newStream;
2188 }
2189 }
2190 }
2191
2192 /* If a zsl stream is set, we know that we have configured at least one input or
2193 bidirectional stream */
2194 if (NULL != zslStream) {
2195 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2196 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2197 mInputStreamInfo.format = zslStream->format;
2198 mInputStreamInfo.usage = zslStream->usage;
2199 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2200 mInputStreamInfo.dim.width,
2201 mInputStreamInfo.dim.height,
2202 mInputStreamInfo.format, mInputStreamInfo.usage);
2203 }
2204
2205 cleanAndSortStreamInfo();
2206 if (mMetadataChannel) {
2207 delete mMetadataChannel;
2208 mMetadataChannel = NULL;
2209 }
2210 if (mSupportChannel) {
2211 delete mSupportChannel;
2212 mSupportChannel = NULL;
2213 }
2214
2215 if (mAnalysisChannel) {
2216 delete mAnalysisChannel;
2217 mAnalysisChannel = NULL;
2218 }
2219
2220 if (mDummyBatchChannel) {
2221 delete mDummyBatchChannel;
2222 mDummyBatchChannel = NULL;
2223 }
2224
Emilian Peev7650c122017-01-19 08:24:33 -08002225 if (mDepthChannel) {
2226 mDepthChannel = NULL;
2227 }
2228
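 // Drop dispatcher state from the previous session before registering the
 // new stream set.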
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002229 mShutterDispatcher.clear();
2230 mOutputBufferDispatcher.clear();
2231
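 // persist.camera.is_type selects the image-stabilization (IS) type; the
 // EIS 3.0 specific buffer handling below is keyed off this property.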
Thierry Strudel2896d122017-02-23 19:18:03 -08002232 char is_type_value[PROPERTY_VALUE_MAX];
2233 property_get("persist.camera.is_type", is_type_value, "4");
2234 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2235
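 // persist.camera.gzoom.at is a bit mask: bit 0 enables Google zoom on the
 // video stream and bit 1 on the preview stream; persist.camera.gzoom.4k
 // additionally gates it for 4K video.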
Binhao Line406f062017-05-03 14:39:44 -07002236 char property_value[PROPERTY_VALUE_MAX];
2237 property_get("persist.camera.gzoom.at", property_value, "0");
2238 int goog_zoom_at = atoi(property_value);
2239 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2240 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2241
2242 property_get("persist.camera.gzoom.4k", property_value, "0");
2243 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2244
Thierry Strudel3d639192016-09-09 11:52:26 -07002245 //Create metadata channel and initialize it
2246 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2247 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2248 gCamCapability[mCameraId]->color_arrangement);
2249 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2250 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002251 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002252 if (mMetadataChannel == NULL) {
2253 LOGE("failed to allocate metadata channel");
2254 rc = -ENOMEM;
2255 pthread_mutex_unlock(&mMutex);
2256 return rc;
2257 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002258 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002259 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2260 if (rc < 0) {
2261 LOGE("metadata channel initialization failed");
2262 delete mMetadataChannel;
2263 mMetadataChannel = NULL;
2264 pthread_mutex_unlock(&mMutex);
2265 return rc;
2266 }
2267
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002270 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002271 // Keep track of preview/video streams indices.
2272 // There could be more than one preview streams, but only one video stream.
2273 int32_t video_stream_idx = -1;
2274 int32_t preview_stream_idx[streamList->num_streams];
2275 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2277 /* Allocate channel objects for the requested streams */
2278 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002279
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 camera3_stream_t *newStream = streamList->streams[i];
2281 uint32_t stream_usage = newStream->usage;
2282 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2283 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2284 struct camera_info *p_info = NULL;
2285 pthread_mutex_lock(&gCamLock);
2286 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2287 pthread_mutex_unlock(&gCamLock);
2288 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2289 || IS_USAGE_ZSL(newStream->usage)) &&
2290 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002291 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002292 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002293 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2294 if (bUseCommonFeatureMask)
2295 zsl_ppmask = commonFeatureMask;
2296 else
2297 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002299 if (numStreamsOnEncoder > 0)
2300 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2301 else
2302 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002303 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002304 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002306 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 LOGH("Input stream configured, reprocess config");
2308 } else {
2309 //for non zsl streams find out the format
2310 switch (newStream->format) {
2311 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2312 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002313 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2315 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2316 /* add additional features to pp feature mask */
2317 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2318 mStreamConfigInfo.num_streams);
2319
2320 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2321 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2322 CAM_STREAM_TYPE_VIDEO;
2323 if (m_bTnrEnabled && m_bTnrVideo) {
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2325 CAM_QCOM_FEATURE_CPP_TNR;
2326 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2327 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2328 ~CAM_QCOM_FEATURE_CDS;
2329 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2331 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2332 CAM_QTI_FEATURE_PPEISCORE;
2333 }
Binhao Line406f062017-05-03 14:39:44 -07002334 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2336 CAM_QCOM_FEATURE_GOOG_ZOOM;
2337 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002338 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002339 } else {
2340 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2341 CAM_STREAM_TYPE_PREVIEW;
2342 if (m_bTnrEnabled && m_bTnrPreview) {
2343 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2344 CAM_QCOM_FEATURE_CPP_TNR;
2345 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2346 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2347 ~CAM_QCOM_FEATURE_CDS;
2348 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002349 if(!m_bSwTnrPreview) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2351 ~CAM_QTI_FEATURE_SW_TNR;
2352 }
Binhao Line406f062017-05-03 14:39:44 -07002353 if (is_goog_zoom_preview_enabled) {
2354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2355 CAM_QCOM_FEATURE_GOOG_ZOOM;
2356 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002357 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 padding_info.width_padding = mSurfaceStridePadding;
2359 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002360 previewSize.width = (int32_t)newStream->width;
2361 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002362 }
2363 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2364 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2365 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2366 newStream->height;
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2368 newStream->width;
2369 }
2370 }
2371 break;
2372 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002373 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002374 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2375 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2376 if (bUseCommonFeatureMask)
2377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2378 commonFeatureMask;
2379 else
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2381 CAM_QCOM_FEATURE_NONE;
2382 } else {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2384 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2385 }
2386 break;
2387 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002388 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2390 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2391 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2392 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2393 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002394 /* Remove rotation if it is not supported
2395 for 4K LiveVideo snapshot case (online processing) */
2396 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2397 CAM_QCOM_FEATURE_ROTATION)) {
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2399 &= ~CAM_QCOM_FEATURE_ROTATION;
2400 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002401 } else {
2402 if (bUseCommonFeatureMask &&
2403 isOnEncoder(maxViewfinderSize, newStream->width,
2404 newStream->height)) {
2405 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2406 } else {
2407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2408 }
2409 }
2410 if (isZsl) {
2411 if (zslStream) {
2412 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2413 (int32_t)zslStream->width;
2414 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2415 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2417 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002418 } else {
2419 LOGE("Error, No ZSL stream identified");
2420 pthread_mutex_unlock(&mMutex);
2421 return -EINVAL;
2422 }
2423 } else if (m_bIs4KVideo) {
2424 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2425 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2426 } else if (bYuv888OverrideJpeg) {
2427 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2428 (int32_t)largeYuv888Size.width;
2429 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2430 (int32_t)largeYuv888Size.height;
2431 }
2432 break;
2433 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2434 case HAL_PIXEL_FORMAT_RAW16:
2435 case HAL_PIXEL_FORMAT_RAW10:
2436 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2438 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002439 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2440 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
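 // PDAF statistics stream: pick up the sub-format, meta-raw format,
 // data type (dt) and virtual channel (vc) advertised for this PD
 // index in the capability table.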
2441 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2442 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2443 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2444 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2445 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2446 gCamCapability[mCameraId]->dt[mPDIndex];
2447 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2448 gCamCapability[mCameraId]->vc[mPDIndex];
2449 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002450 break;
2451 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002452 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2455 break;
2456 }
2457 }
2458
2459 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2460 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2461 gCamCapability[mCameraId]->color_arrangement);
2462
2463 if (newStream->priv == NULL) {
2464 //New stream, construct channel
2465 switch (newStream->stream_type) {
2466 case CAMERA3_STREAM_INPUT:
2467 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2468 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2469 break;
2470 case CAMERA3_STREAM_BIDIRECTIONAL:
2471 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2472 GRALLOC_USAGE_HW_CAMERA_WRITE;
2473 break;
2474 case CAMERA3_STREAM_OUTPUT:
2475 /* For video encoding stream, set read/write rarely
2476 * flag so that they may be set to un-cached */
2477 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2478 newStream->usage |=
2479 (GRALLOC_USAGE_SW_READ_RARELY |
2480 GRALLOC_USAGE_SW_WRITE_RARELY |
2481 GRALLOC_USAGE_HW_CAMERA_WRITE);
2482 else if (IS_USAGE_ZSL(newStream->usage))
2483 {
2484 LOGD("ZSL usage flag skipping");
2485 }
2486 else if (newStream == zslStream
2487 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2488 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2489 } else
2490 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2491 break;
2492 default:
2493 LOGE("Invalid stream_type %d", newStream->stream_type);
2494 break;
2495 }
2496
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002497 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2499 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2500 QCamera3ProcessingChannel *channel = NULL;
2501 switch (newStream->format) {
2502 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2503 if ((newStream->usage &
2504 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2505 (streamList->operation_mode ==
2506 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2507 ) {
2508 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2509 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002510 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 this,
2512 newStream,
2513 (cam_stream_type_t)
2514 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2515 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2516 mMetadataChannel,
2517 0); //heap buffers are not required for HFR video channel
2518 if (channel == NULL) {
2519 LOGE("allocation of channel failed");
2520 pthread_mutex_unlock(&mMutex);
2521 return -ENOMEM;
2522 }
2523 //channel->getNumBuffers() will return 0 here, so use
2524 //MAX_INFLIGHT_HFR_REQUESTS instead
2525 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2526 newStream->priv = channel;
2527 LOGI("num video buffers in HFR mode: %d",
2528 MAX_INFLIGHT_HFR_REQUESTS);
2529 } else {
2530 /* Copy stream contents in HFR preview only case to create
2531 * dummy batch channel so that sensor streaming is in
2532 * HFR mode */
2533 if (!m_bIsVideo && (streamList->operation_mode ==
2534 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2535 mDummyBatchStream = *newStream;
2536 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 int bufferCount = MAX_INFLIGHT_REQUESTS;
2538 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2539 CAM_STREAM_TYPE_VIDEO) {
2540 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2541 bufferCount = MAX_VIDEO_BUFFERS;
2542 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002543 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2544 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002545 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 this,
2547 newStream,
2548 (cam_stream_type_t)
2549 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2550 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2551 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002552 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 if (channel == NULL) {
2554 LOGE("allocation of channel failed");
2555 pthread_mutex_unlock(&mMutex);
2556 return -ENOMEM;
2557 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002558 /* disable UBWC for preview, though supported,
2559 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002560 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002561 (previewSize.width == (int32_t)videoWidth)&&
2562 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002563 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002564 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002565 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002566 /* When goog_zoom is linked to the preview or video stream,
2567 * disable ubwc to the linked stream */
2568 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2569 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2570 channel->setUBWCEnabled(false);
2571 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002572 newStream->max_buffers = channel->getNumBuffers();
2573 newStream->priv = channel;
2574 }
2575 break;
2576 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2577 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2578 mChannelHandle,
2579 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002580 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002581 this,
2582 newStream,
2583 (cam_stream_type_t)
2584 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2585 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2586 mMetadataChannel);
2587 if (channel == NULL) {
2588 LOGE("allocation of YUV channel failed");
2589 pthread_mutex_unlock(&mMutex);
2590 return -ENOMEM;
2591 }
2592 newStream->max_buffers = channel->getNumBuffers();
2593 newStream->priv = channel;
2594 break;
2595 }
2596 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2597 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002598 case HAL_PIXEL_FORMAT_RAW10: {
2599 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2600 (HAL_DATASPACE_DEPTH != newStream->data_space))
2601 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002602 mRawChannel = new QCamera3RawChannel(
2603 mCameraHandle->camera_handle, mChannelHandle,
2604 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002605 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 this, newStream,
2607 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002608 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002609 if (mRawChannel == NULL) {
2610 LOGE("allocation of raw channel failed");
2611 pthread_mutex_unlock(&mMutex);
2612 return -ENOMEM;
2613 }
2614 newStream->max_buffers = mRawChannel->getNumBuffers();
2615 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2616 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002617 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002618 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002619 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2620 mDepthChannel = new QCamera3DepthChannel(
2621 mCameraHandle->camera_handle, mChannelHandle,
2622 mCameraHandle->ops, NULL, NULL, &padding_info,
2623 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2624 mMetadataChannel);
2625 if (NULL == mDepthChannel) {
2626 LOGE("Allocation of depth channel failed");
2627 pthread_mutex_unlock(&mMutex);
2628 return NO_MEMORY;
2629 }
2630 newStream->priv = mDepthChannel;
2631 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2632 } else {
2633 // Max live snapshot inflight buffer is 1. This is to mitigate
2634 // frame drop issues for video snapshot. The more buffers being
2635 // allocated, the more frame drops there are.
2636 mPictureChannel = new QCamera3PicChannel(
2637 mCameraHandle->camera_handle, mChannelHandle,
2638 mCameraHandle->ops, captureResultCb,
2639 setBufferErrorStatus, &padding_info, this, newStream,
2640 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2641 m_bIs4KVideo, isZsl, mMetadataChannel,
2642 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2643 if (mPictureChannel == NULL) {
2644 LOGE("allocation of channel failed");
2645 pthread_mutex_unlock(&mMutex);
2646 return -ENOMEM;
2647 }
2648 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2649 newStream->max_buffers = mPictureChannel->getNumBuffers();
2650 mPictureChannel->overrideYuvSize(
2651 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2652 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002653 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002654 break;
2655
2656 default:
2657 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002658 pthread_mutex_unlock(&mMutex);
2659 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 }
2661 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2662 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2663 } else {
2664 LOGE("Error, Unknown stream type");
2665 pthread_mutex_unlock(&mMutex);
2666 return -EINVAL;
2667 }
2668
2669 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002670 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2671 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002672 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002673 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002674 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2675 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2676 }
2677 }
2678
2679 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2680 it != mStreamInfo.end(); it++) {
2681 if ((*it)->stream == newStream) {
2682 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2683 break;
2684 }
2685 }
2686 } else {
2687 // Channel already exists for this stream
2688 // Do nothing for now
2689 }
2690 padding_info = gCamCapability[mCameraId]->padding_info;
2691
Emilian Peev7650c122017-01-19 08:24:33 -08002692        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002693         * since there is no real stream associated with them
2694 */
Emilian Peev7650c122017-01-19 08:24:33 -08002695 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002696 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2697 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002698 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002700 }
2701
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002702 // Let buffer dispatcher know the configured streams.
2703 mOutputBufferDispatcher.configureStreams(streamList);
2704
Binhao Lincdb362a2017-04-20 13:31:54 -07002705 // By default, preview stream TNR is disabled.
2706    // Enable TNR for the preview stream only if all of the conditions below are satisfied:
2707    // 1. video resolution <= 1080p.
2708    // 2. preview resolution == video resolution.
2709    // 3. video stream TNR is enabled.
2710    // 4. EIS 2.0 is selected.
2711 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2712 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2713 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2714 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2715 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2716 video_stream->width == preview_stream->width &&
2717 video_stream->height == preview_stream->height) {
2718 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2719 CAM_QCOM_FEATURE_CPP_TNR;
2720 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2721 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2722 ~CAM_QCOM_FEATURE_CDS;
2723 }
2724 }
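    // Illustrative example (not part of the original source): with a
    // 1920x1080 video stream and a 1920x1080 preview stream, video TNR
    // enabled and EIS 2.0 selected, the loop above ORs
    // CAM_QCOM_FEATURE_CPP_TNR into the preview stream's postprocess mask
    // and clears CAM_QCOM_FEATURE_CDS, since TNR and CDS are mutually
    // exclusive. A 3840x2160 video stream, or a preview whose size differs
    // from the video size, leaves preview TNR disabled.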
2725
Thierry Strudel2896d122017-02-23 19:18:03 -08002726 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2727 onlyRaw = false;
2728 }
2729
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002730 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002731 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002732 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002733 cam_analysis_info_t analysisInfo;
2734 int32_t ret = NO_ERROR;
2735 ret = mCommon.getAnalysisInfo(
2736 FALSE,
2737 analysisFeatureMask,
2738 &analysisInfo);
2739 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002740 cam_color_filter_arrangement_t analysis_color_arrangement =
2741 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2742 CAM_FILTER_ARRANGEMENT_Y :
2743 gCamCapability[mCameraId]->color_arrangement);
2744 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2745 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002746 cam_dimension_t analysisDim;
2747 analysisDim = mCommon.getMatchingDimension(previewSize,
2748 analysisInfo.analysis_recommended_res);
2749
2750 mAnalysisChannel = new QCamera3SupportChannel(
2751 mCameraHandle->camera_handle,
2752 mChannelHandle,
2753 mCameraHandle->ops,
2754 &analysisInfo.analysis_padding_info,
2755 analysisFeatureMask,
2756 CAM_STREAM_TYPE_ANALYSIS,
2757 &analysisDim,
2758 (analysisInfo.analysis_format
2759 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2760 : CAM_FORMAT_YUV_420_NV21),
2761 analysisInfo.hw_analysis_supported,
2762 gCamCapability[mCameraId]->color_arrangement,
2763 this,
2764 0); // force buffer count to 0
2765 } else {
2766 LOGW("getAnalysisInfo failed, ret = %d", ret);
2767 }
2768 if (!mAnalysisChannel) {
2769 LOGW("Analysis channel cannot be created");
2770 }
2771 }
2772
Thierry Strudel3d639192016-09-09 11:52:26 -07002773 //RAW DUMP channel
2774 if (mEnableRawDump && isRawStreamRequested == false){
2775 cam_dimension_t rawDumpSize;
2776 rawDumpSize = getMaxRawSize(mCameraId);
2777 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2778 setPAAFSupport(rawDumpFeatureMask,
2779 CAM_STREAM_TYPE_RAW,
2780 gCamCapability[mCameraId]->color_arrangement);
2781 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2782 mChannelHandle,
2783 mCameraHandle->ops,
2784 rawDumpSize,
2785 &padding_info,
2786 this, rawDumpFeatureMask);
2787 if (!mRawDumpChannel) {
2788 LOGE("Raw Dump channel cannot be created");
2789 pthread_mutex_unlock(&mMutex);
2790 return -ENOMEM;
2791 }
2792 }
2793
Thierry Strudel3d639192016-09-09 11:52:26 -07002794 if (mAnalysisChannel) {
2795 cam_analysis_info_t analysisInfo;
2796 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2797 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2798 CAM_STREAM_TYPE_ANALYSIS;
2799 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2800 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002801 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002802 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2803 &analysisInfo);
2804 if (rc != NO_ERROR) {
2805 LOGE("getAnalysisInfo failed, ret = %d", rc);
2806 pthread_mutex_unlock(&mMutex);
2807 return rc;
2808 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002809 cam_color_filter_arrangement_t analysis_color_arrangement =
2810 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2811 CAM_FILTER_ARRANGEMENT_Y :
2812 gCamCapability[mCameraId]->color_arrangement);
2813 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2814 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2815 analysis_color_arrangement);
2816
Thierry Strudel3d639192016-09-09 11:52:26 -07002817 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002818 mCommon.getMatchingDimension(previewSize,
2819 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002820 mStreamConfigInfo.num_streams++;
2821 }
2822
Thierry Strudel2896d122017-02-23 19:18:03 -08002823 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002824 cam_analysis_info_t supportInfo;
2825 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2826 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2827 setPAAFSupport(callbackFeatureMask,
2828 CAM_STREAM_TYPE_CALLBACK,
2829 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002830 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002831 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002832 if (ret != NO_ERROR) {
2833 /* Ignore the error for Mono camera
2834 * because the PAAF bit mask is only set
2835 * for CAM_STREAM_TYPE_ANALYSIS stream type
2836 */
2837 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2838 LOGW("getAnalysisInfo failed, ret = %d", ret);
2839 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002840 }
2841 mSupportChannel = new QCamera3SupportChannel(
2842 mCameraHandle->camera_handle,
2843 mChannelHandle,
2844 mCameraHandle->ops,
2845 &gCamCapability[mCameraId]->padding_info,
2846 callbackFeatureMask,
2847 CAM_STREAM_TYPE_CALLBACK,
2848 &QCamera3SupportChannel::kDim,
2849 CAM_FORMAT_YUV_420_NV21,
2850 supportInfo.hw_analysis_supported,
2851 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002852 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002853 if (!mSupportChannel) {
2854 LOGE("dummy channel cannot be created");
2855 pthread_mutex_unlock(&mMutex);
2856 return -ENOMEM;
2857 }
2858 }
2859
2860 if (mSupportChannel) {
2861 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2862 QCamera3SupportChannel::kDim;
2863 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2864 CAM_STREAM_TYPE_CALLBACK;
2865 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2866 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2867 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2868 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2869 gCamCapability[mCameraId]->color_arrangement);
2870 mStreamConfigInfo.num_streams++;
2871 }
2872
2873 if (mRawDumpChannel) {
2874 cam_dimension_t rawSize;
2875 rawSize = getMaxRawSize(mCameraId);
2876 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2877 rawSize;
2878 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2879 CAM_STREAM_TYPE_RAW;
2880 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2881 CAM_QCOM_FEATURE_NONE;
2882 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2883 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2884 gCamCapability[mCameraId]->color_arrangement);
2885 mStreamConfigInfo.num_streams++;
2886 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002887
2888 if (mHdrPlusRawSrcChannel) {
2889 cam_dimension_t rawSize;
2890 rawSize = getMaxRawSize(mCameraId);
2891 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2892 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2893 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2894 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2895 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2896 gCamCapability[mCameraId]->color_arrangement);
2897 mStreamConfigInfo.num_streams++;
2898 }
2899
Thierry Strudel3d639192016-09-09 11:52:26 -07002900 /* In HFR mode, if video stream is not added, create a dummy channel so that
2901 * ISP can create a batch mode even for preview only case. This channel is
2902 * never 'start'ed (no stream-on), it is only 'initialized' */
2903 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2904 !m_bIsVideo) {
2905 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2906 setPAAFSupport(dummyFeatureMask,
2907 CAM_STREAM_TYPE_VIDEO,
2908 gCamCapability[mCameraId]->color_arrangement);
2909 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2910 mChannelHandle,
2911 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002912 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002913 this,
2914 &mDummyBatchStream,
2915 CAM_STREAM_TYPE_VIDEO,
2916 dummyFeatureMask,
2917 mMetadataChannel);
2918 if (NULL == mDummyBatchChannel) {
2919 LOGE("creation of mDummyBatchChannel failed."
2920 "Preview will use non-hfr sensor mode ");
2921 }
2922 }
2923 if (mDummyBatchChannel) {
2924 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2925 mDummyBatchStream.width;
2926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2927 mDummyBatchStream.height;
2928 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2929 CAM_STREAM_TYPE_VIDEO;
2930 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2931 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2932 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2933 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2934 gCamCapability[mCameraId]->color_arrangement);
2935 mStreamConfigInfo.num_streams++;
2936 }
2937
2938 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2939 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002940 m_bIs4KVideo ? 0 :
2941 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002942
2943 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2944 for (pendingRequestIterator i = mPendingRequestsList.begin();
2945 i != mPendingRequestsList.end();) {
2946 i = erasePendingRequest(i);
2947 }
2948 mPendingFrameDropList.clear();
2949 // Initialize/Reset the pending buffers list
2950 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2951 req.mPendingBufferList.clear();
2952 }
2953 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2954
Thierry Strudel3d639192016-09-09 11:52:26 -07002955 mCurJpegMeta.clear();
2956 //Get min frame duration for this streams configuration
2957 deriveMinFrameDuration();
2958
Chien-Yu Chenee335912017-02-09 17:53:20 -08002959 mFirstPreviewIntentSeen = false;
2960
2961    // Disable HDR+ if it is enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002962 {
2963 Mutex::Autolock l(gHdrPlusClientLock);
2964 disableHdrPlusModeLocked();
2965 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002966
Thierry Strudel3d639192016-09-09 11:52:26 -07002967 // Update state
2968 mState = CONFIGURED;
2969
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002970 mFirstMetadataCallback = true;
2971
Thierry Strudel3d639192016-09-09 11:52:26 -07002972 pthread_mutex_unlock(&mMutex);
2973
2974 return rc;
2975}
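
/* Illustrative sketch (an assumption for documentation, not part of this
 * file): the kind of stream list the camera framework might pass into
 * configure_streams() for a simple preview + JPEG use case. The concrete
 * sizes and variable names are hypothetical.
 *
 *   camera3_stream_t preview = {};
 *   preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *   preview.width       = 1920;
 *   preview.height      = 1080;
 *   preview.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *
 *   camera3_stream_t snapshot = {};
 *   snapshot.stream_type = CAMERA3_STREAM_OUTPUT;
 *   snapshot.width       = 4032;
 *   snapshot.height      = 3024;
 *   snapshot.format      = HAL_PIXEL_FORMAT_BLOB;
 *   snapshot.data_space  = HAL_DATASPACE_V0_JFIF;
 *
 *   camera3_stream_t *streams[] = { &preview, &snapshot };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams    = 2;
 *   config.streams        = streams;
 *   config.operation_mode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
 *
 * On successful return each stream's priv points at the QCamera3Channel
 * created above and max_buffers has been filled in by the HAL.
 */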
2976
2977/*===========================================================================
2978 * FUNCTION : validateCaptureRequest
2979 *
2980 * DESCRIPTION: validate a capture request from camera service
2981 *
2982 * PARAMETERS :
2983 * @request : request from framework to process
2984 *
2985 * RETURN :
2986 *
2987 *==========================================================================*/
2988int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002989 camera3_capture_request_t *request,
2990 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002991{
2992 ssize_t idx = 0;
2993 const camera3_stream_buffer_t *b;
2994 CameraMetadata meta;
2995
2996 /* Sanity check the request */
2997 if (request == NULL) {
2998 LOGE("NULL capture request");
2999 return BAD_VALUE;
3000 }
3001
3002 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3003 /*settings cannot be null for the first request*/
3004 return BAD_VALUE;
3005 }
3006
3007 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003008 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3009 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003010 LOGE("Request %d: No output buffers provided!",
3011 __FUNCTION__, frameNumber);
3012 return BAD_VALUE;
3013 }
3014 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3015 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3016 request->num_output_buffers, MAX_NUM_STREAMS);
3017 return BAD_VALUE;
3018 }
3019 if (request->input_buffer != NULL) {
3020 b = request->input_buffer;
3021 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3022 LOGE("Request %d: Buffer %ld: Status not OK!",
3023 frameNumber, (long)idx);
3024 return BAD_VALUE;
3025 }
3026 if (b->release_fence != -1) {
3027 LOGE("Request %d: Buffer %ld: Has a release fence!",
3028 frameNumber, (long)idx);
3029 return BAD_VALUE;
3030 }
3031 if (b->buffer == NULL) {
3032 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3033 frameNumber, (long)idx);
3034 return BAD_VALUE;
3035 }
3036 }
3037
3038 // Validate all buffers
3039 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003040 if (b == NULL) {
3041 return BAD_VALUE;
3042 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003043 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003044 QCamera3ProcessingChannel *channel =
3045 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3046 if (channel == NULL) {
3047 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3048 frameNumber, (long)idx);
3049 return BAD_VALUE;
3050 }
3051 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3052 LOGE("Request %d: Buffer %ld: Status not OK!",
3053 frameNumber, (long)idx);
3054 return BAD_VALUE;
3055 }
3056 if (b->release_fence != -1) {
3057 LOGE("Request %d: Buffer %ld: Has a release fence!",
3058 frameNumber, (long)idx);
3059 return BAD_VALUE;
3060 }
3061 if (b->buffer == NULL) {
3062 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3063 frameNumber, (long)idx);
3064 return BAD_VALUE;
3065 }
3066 if (*(b->buffer) == NULL) {
3067 LOGE("Request %d: Buffer %ld: NULL private handle!",
3068 frameNumber, (long)idx);
3069 return BAD_VALUE;
3070 }
3071 idx++;
3072 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003073 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003074 return NO_ERROR;
3075}
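
/* Illustrative sketch (assumption, not part of the original file): a minimal
 * request that passes the checks above -- one output buffer with OK status,
 * no release fence, a non-NULL buffer handle, and settings present for the
 * first request after a stream configuration. configuredStream,
 * grallocHandle and defaultRequestSettings are hypothetical placeholders for
 * framework-owned objects.
 *
 *   camera3_stream_buffer_t outBuf = {};
 *   outBuf.stream        = configuredStream;   // priv set during configure
 *   outBuf.buffer        = &grallocHandle;     // non-NULL buffer_handle_t*
 *   outBuf.status        = CAMERA3_BUFFER_STATUS_OK;
 *   outBuf.acquire_fence = -1;
 *   outBuf.release_fence = -1;
 *
 *   camera3_capture_request_t req = {};
 *   req.frame_number       = 0;
 *   req.settings           = defaultRequestSettings; // non-NULL first time
 *   req.num_output_buffers = 1;
 *   req.output_buffers     = &outBuf;
 */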
3076
3077/*===========================================================================
3078 * FUNCTION : deriveMinFrameDuration
3079 *
3080 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3081 * on currently configured streams.
3082 *
3083 * PARAMETERS : NONE
3084 *
3085 * RETURN : NONE
3086 *
3087 *==========================================================================*/
3088void QCamera3HardwareInterface::deriveMinFrameDuration()
3089{
3090 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3091
3092 maxJpegDim = 0;
3093 maxProcessedDim = 0;
3094 maxRawDim = 0;
3095
3096 // Figure out maximum jpeg, processed, and raw dimensions
3097 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3098 it != mStreamInfo.end(); it++) {
3099
3100 // Input stream doesn't have valid stream_type
3101 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3102 continue;
3103
3104 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3105 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3106 if (dimension > maxJpegDim)
3107 maxJpegDim = dimension;
3108 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3109 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3110 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3111 if (dimension > maxRawDim)
3112 maxRawDim = dimension;
3113 } else {
3114 if (dimension > maxProcessedDim)
3115 maxProcessedDim = dimension;
3116 }
3117 }
3118
3119 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3120 MAX_SIZES_CNT);
3121
3122 //Assume all jpeg dimensions are in processed dimensions.
3123 if (maxJpegDim > maxProcessedDim)
3124 maxProcessedDim = maxJpegDim;
3125    //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3126 if (maxProcessedDim > maxRawDim) {
3127 maxRawDim = INT32_MAX;
3128
3129 for (size_t i = 0; i < count; i++) {
3130 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3131 gCamCapability[mCameraId]->raw_dim[i].height;
3132 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3133 maxRawDim = dimension;
3134 }
3135 }
3136
3137 //Find minimum durations for processed, jpeg, and raw
3138 for (size_t i = 0; i < count; i++) {
3139 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3140 gCamCapability[mCameraId]->raw_dim[i].height) {
3141 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3142 break;
3143 }
3144 }
3145 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3146 for (size_t i = 0; i < count; i++) {
3147 if (maxProcessedDim ==
3148 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3149 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3150 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3151 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3152 break;
3153 }
3154 }
3155}
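
/* Worked example (illustrative numbers, not taken from the capability
 * tables): with a 1920x1080 preview and a 4032x3024 JPEG stream and no RAW
 * stream configured, maxProcessedDim becomes 4032*3024. Because maxRawDim
 * starts at 0, the loop above then selects the smallest advertised RAW
 * dimension that is >= 4032*3024, and the minimum durations are looked up
 * for that RAW size and for the matching picture_sizes_tbl entry.
 * mMinJpegFrameDuration ends up equal to mMinProcessedFrameDuration since
 * both come from picture_min_duration.
 */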
3156
3157/*===========================================================================
3158 * FUNCTION : getMinFrameDuration
3159 *
3160 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3161 *              derived for the current stream configuration and the current request.
3162 *
3163 * PARAMETERS : @request: request sent by the framework
3164 *
3165 * RETURN     : minimum frame duration for a particular request
3166 *
3167 *==========================================================================*/
3168int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3169{
3170 bool hasJpegStream = false;
3171 bool hasRawStream = false;
3172 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3173 const camera3_stream_t *stream = request->output_buffers[i].stream;
3174 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3175 hasJpegStream = true;
3176 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3177 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3178 stream->format == HAL_PIXEL_FORMAT_RAW16)
3179 hasRawStream = true;
3180 }
3181
3182 if (!hasJpegStream)
3183 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3184 else
3185 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3186}
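
/* Example (illustrative): a preview-only request returns
 * MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once the request
 * also carries a BLOB (JPEG) buffer, mMinJpegFrameDuration is folded into
 * the MAX as well, so requests that include a JPEG capture may legitimately
 * report a longer minimum frame duration than plain preview requests.
 */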
3187
3188/*===========================================================================
3189 * FUNCTION : handleBuffersDuringFlushLock
3190 *
3191 * DESCRIPTION: Account for buffers returned from back-end during flush
3192 * This function is executed while mMutex is held by the caller.
3193 *
3194 * PARAMETERS :
3195 * @buffer: image buffer for the callback
3196 *
3197 * RETURN :
3198 *==========================================================================*/
3199void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3200{
3201 bool buffer_found = false;
3202 for (List<PendingBuffersInRequest>::iterator req =
3203 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3204 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3205 for (List<PendingBufferInfo>::iterator i =
3206 req->mPendingBufferList.begin();
3207 i != req->mPendingBufferList.end(); i++) {
3208 if (i->buffer == buffer->buffer) {
3209 mPendingBuffersMap.numPendingBufsAtFlush--;
3210 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3211 buffer->buffer, req->frame_number,
3212 mPendingBuffersMap.numPendingBufsAtFlush);
3213 buffer_found = true;
3214 break;
3215 }
3216 }
3217 if (buffer_found) {
3218 break;
3219 }
3220 }
3221 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3222 //signal the flush()
3223 LOGD("All buffers returned to HAL. Continue flush");
3224 pthread_cond_signal(&mBuffersCond);
3225 }
3226}
3227
Thierry Strudel3d639192016-09-09 11:52:26 -07003228/*===========================================================================
3229 * FUNCTION : handleBatchMetadata
3230 *
3231 * DESCRIPTION: Handles metadata buffer callback in batch mode
3232 *
3233 * PARAMETERS : @metadata_buf: metadata buffer
3234 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3235 * the meta buf in this method
3236 *
3237 * RETURN :
3238 *
3239 *==========================================================================*/
3240void QCamera3HardwareInterface::handleBatchMetadata(
3241 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3242{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003243 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003244
3245 if (NULL == metadata_buf) {
3246 LOGE("metadata_buf is NULL");
3247 return;
3248 }
3249    /* In batch mode, the metadata will contain the frame number and timestamp of
3250 * the last frame in the batch. Eg: a batch containing buffers from request
3251 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3252 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
3253 * multiple process_capture_results */
3254 metadata_buffer_t *metadata =
3255 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3256 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3257 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3258 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3259 uint32_t frame_number = 0, urgent_frame_number = 0;
3260 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3261 bool invalid_metadata = false;
3262 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3263 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003264 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003265
3266 int32_t *p_frame_number_valid =
3267 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3268 uint32_t *p_frame_number =
3269 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3270 int64_t *p_capture_time =
3271 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3272 int32_t *p_urgent_frame_number_valid =
3273 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3274 uint32_t *p_urgent_frame_number =
3275 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3276
3277 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3278 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3279 (NULL == p_urgent_frame_number)) {
3280 LOGE("Invalid metadata");
3281 invalid_metadata = true;
3282 } else {
3283 frame_number_valid = *p_frame_number_valid;
3284 last_frame_number = *p_frame_number;
3285 last_frame_capture_time = *p_capture_time;
3286 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3287 last_urgent_frame_number = *p_urgent_frame_number;
3288 }
3289
3290    /* In batch mode, when no video buffers are requested, set_parms are sent
3291 * for every capture_request. The difference between consecutive urgent
3292 * frame numbers and frame numbers should be used to interpolate the
3293 * corresponding frame numbers and time stamps */
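    /* Worked example (illustrative): for a batch of four requests with
     * first_frame_number = 5 and last_frame_number = 8,
     * frameNumDiff = 8 + 1 - 5 = 4, so the loop below emits results for
     * frames 5, 6, 7 and 8. At 120 fps HFR the timestamps are spread as
     *   capture_time(i) = last_frame_capture_time
     *                     - ((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps
     *                     + (i * NSEC_PER_SEC) / mHFRVideoFps
     * i.e. roughly 8.33 ms apart. */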
3294 pthread_mutex_lock(&mMutex);
3295 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003296 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3297 if(idx < 0) {
3298 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3299 last_urgent_frame_number);
3300 mState = ERROR;
3301 pthread_mutex_unlock(&mMutex);
3302 return;
3303 }
3304 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003305 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3306 first_urgent_frame_number;
3307
3308 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3309 urgent_frame_number_valid,
3310 first_urgent_frame_number, last_urgent_frame_number);
3311 }
3312
3313 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003314 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3315 if(idx < 0) {
3316 LOGE("Invalid frame number received: %d. Irrecoverable error",
3317 last_frame_number);
3318 mState = ERROR;
3319 pthread_mutex_unlock(&mMutex);
3320 return;
3321 }
3322 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003323 frameNumDiff = last_frame_number + 1 -
3324 first_frame_number;
3325 mPendingBatchMap.removeItem(last_frame_number);
3326
3327 LOGD("frm: valid: %d frm_num: %d - %d",
3328 frame_number_valid,
3329 first_frame_number, last_frame_number);
3330
3331 }
3332 pthread_mutex_unlock(&mMutex);
3333
3334 if (urgent_frame_number_valid || frame_number_valid) {
3335 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3336 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3337 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3338 urgentFrameNumDiff, last_urgent_frame_number);
3339 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3340 LOGE("frameNumDiff: %d frameNum: %d",
3341 frameNumDiff, last_frame_number);
3342 }
3343
3344 for (size_t i = 0; i < loopCount; i++) {
3345 /* handleMetadataWithLock is called even for invalid_metadata for
3346 * pipeline depth calculation */
3347 if (!invalid_metadata) {
3348 /* Infer frame number. Batch metadata contains frame number of the
3349 * last frame */
3350 if (urgent_frame_number_valid) {
3351 if (i < urgentFrameNumDiff) {
3352 urgent_frame_number =
3353 first_urgent_frame_number + i;
3354 LOGD("inferred urgent frame_number: %d",
3355 urgent_frame_number);
3356 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3357 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3358 } else {
3359 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3360 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3361 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3362 }
3363 }
3364
3365 /* Infer frame number. Batch metadata contains frame number of the
3366 * last frame */
3367 if (frame_number_valid) {
3368 if (i < frameNumDiff) {
3369 frame_number = first_frame_number + i;
3370 LOGD("inferred frame_number: %d", frame_number);
3371 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3372 CAM_INTF_META_FRAME_NUMBER, frame_number);
3373 } else {
3374 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3375 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3376 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3377 }
3378 }
3379
3380 if (last_frame_capture_time) {
3381 //Infer timestamp
3382 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003383 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003384 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003385 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003386 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3387 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3388 LOGD("batch capture_time: %lld, capture_time: %lld",
3389 last_frame_capture_time, capture_time);
3390 }
3391 }
3392 pthread_mutex_lock(&mMutex);
3393 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003394 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003395 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3396 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003397                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 pthread_mutex_unlock(&mMutex);
3399 }
3400
3401 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003402 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003403 mMetadataChannel->bufDone(metadata_buf);
3404 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003405 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003406 }
3407}
3408
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003409void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3410 camera3_error_msg_code_t errorCode)
3411{
3412 camera3_notify_msg_t notify_msg;
3413 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3414 notify_msg.type = CAMERA3_MSG_ERROR;
3415 notify_msg.message.error.error_code = errorCode;
3416 notify_msg.message.error.error_stream = NULL;
3417 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003418 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003419
3420 return;
3421}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003422
3423/*===========================================================================
3424 * FUNCTION : sendPartialMetadataWithLock
3425 *
3426 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3427 *
3428 * PARAMETERS : @metadata: metadata buffer
3429 * @requestIter: The iterator for the pending capture request for
3430 *                          which the partial result is being sent
3431 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3432 * last urgent metadata in a batch. Always true for non-batch mode
3433 *
3434 * RETURN :
3435 *
3436 *==========================================================================*/
3437
3438void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3439 metadata_buffer_t *metadata,
3440 const pendingRequestIterator requestIter,
3441 bool lastUrgentMetadataInBatch)
3442{
3443 camera3_capture_result_t result;
3444 memset(&result, 0, sizeof(camera3_capture_result_t));
3445
3446 requestIter->partial_result_cnt++;
3447
3448 // Extract 3A metadata
3449 result.result = translateCbUrgentMetadataToResultMetadata(
3450 metadata, lastUrgentMetadataInBatch);
3451 // Populate metadata result
3452 result.frame_number = requestIter->frame_number;
3453 result.num_output_buffers = 0;
3454 result.output_buffers = NULL;
3455 result.partial_result = requestIter->partial_result_cnt;
3456
3457 {
3458 Mutex::Autolock l(gHdrPlusClientLock);
3459 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3460 // Notify HDR+ client about the partial metadata.
3461 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3462 result.partial_result == PARTIAL_RESULT_COUNT);
3463 }
3464 }
3465
3466 orchestrateResult(&result);
3467 LOGD("urgent frame_number = %u", result.frame_number);
3468 free_camera_metadata((camera_metadata_t *)result.result);
3469}
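
/* Note (descriptive, not from the original source): each call above bumps
 * the request's partial_result_cnt, so the urgent 3A metadata goes out with
 * a partial_result value below PARTIAL_RESULT_COUNT, and the HDR+ client is
 * only told the metadata is final once partial_result reaches
 * PARTIAL_RESULT_COUNT. */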
3470
Thierry Strudel3d639192016-09-09 11:52:26 -07003471/*===========================================================================
3472 * FUNCTION : handleMetadataWithLock
3473 *
3474 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3475 *
3476 * PARAMETERS : @metadata_buf: metadata buffer
3477 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3478 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003479 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3480 * last urgent metadata in a batch. Always true for non-batch mode
3481 * @lastMetadataInBatch: Boolean to indicate whether this is the
3482 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003483 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3484 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 *
3486 * RETURN :
3487 *
3488 *==========================================================================*/
3489void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003490 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003491 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3492 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003493{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003494 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3496 //during flush do not send metadata from this thread
3497 LOGD("not sending metadata during flush or when mState is error");
3498 if (free_and_bufdone_meta_buf) {
3499 mMetadataChannel->bufDone(metadata_buf);
3500 free(metadata_buf);
3501 }
3502 return;
3503 }
3504
3505 //not in flush
3506 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3507 int32_t frame_number_valid, urgent_frame_number_valid;
3508 uint32_t frame_number, urgent_frame_number;
3509 int64_t capture_time;
3510 nsecs_t currentSysTime;
3511
3512 int32_t *p_frame_number_valid =
3513 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3514 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3515 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3516 int32_t *p_urgent_frame_number_valid =
3517 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3518 uint32_t *p_urgent_frame_number =
3519 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3520 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3521 metadata) {
3522 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3523 *p_frame_number_valid, *p_frame_number);
3524 }
3525
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003526 camera_metadata_t *resultMetadata = nullptr;
3527
Thierry Strudel3d639192016-09-09 11:52:26 -07003528 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3529 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3530 LOGE("Invalid metadata");
3531 if (free_and_bufdone_meta_buf) {
3532 mMetadataChannel->bufDone(metadata_buf);
3533 free(metadata_buf);
3534 }
3535 goto done_metadata;
3536 }
3537 frame_number_valid = *p_frame_number_valid;
3538 frame_number = *p_frame_number;
3539 capture_time = *p_capture_time;
3540 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3541 urgent_frame_number = *p_urgent_frame_number;
3542 currentSysTime = systemTime(CLOCK_MONOTONIC);
3543
3544 // Detect if buffers from any requests are overdue
3545 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003546 int64_t timeout;
3547 {
3548 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3549 // If there is a pending HDR+ request, the following requests may be blocked until the
3550 // HDR+ request is done. So allow a longer timeout.
3551 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3552 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3553 }
3554
3555 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003556 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003557 assert(missed.stream->priv);
3558 if (missed.stream->priv) {
3559 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3560 assert(ch->mStreams[0]);
3561 if (ch->mStreams[0]) {
3562 LOGE("Cancel missing frame = %d, buffer = %p,"
3563 "stream type = %d, stream format = %d",
3564 req.frame_number, missed.buffer,
3565 ch->mStreams[0]->getMyType(), missed.stream->format);
3566 ch->timeoutFrame(req.frame_number);
3567 }
3568 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003569 }
3570 }
3571 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003572    //For the very first metadata callback, regardless of whether it contains a valid
3573    //frame number, send the partial metadata for the jump-starting requests.
3574    //Note that this has to be done even if the metadata doesn't contain a valid
3575    //urgent frame number, because when only one request is ever submitted
3576    //to the HAL, there won't be a subsequent valid urgent frame number.
3577 if (mFirstMetadataCallback) {
3578 for (pendingRequestIterator i =
3579 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3580 if (i->bUseFirstPartial) {
3581 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3582 }
3583 }
3584 mFirstMetadataCallback = false;
3585 }
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 //Partial result on process_capture_result for timestamp
3588 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003589 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003590
3591 //Recieved an urgent Frame Number, handle it
3592 //using partial results
3593 for (pendingRequestIterator i =
3594 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3595 LOGD("Iterator Frame = %d urgent frame = %d",
3596 i->frame_number, urgent_frame_number);
3597
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003598 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003599 (i->partial_result_cnt == 0)) {
3600 LOGE("Error: HAL missed urgent metadata for frame number %d",
3601 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003602 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003603 }
3604
3605 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003606 i->partial_result_cnt == 0) {
3607 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003608 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3609 // Instant AEC settled for this frame.
3610 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3611 mInstantAECSettledFrameNumber = urgent_frame_number;
3612 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003613 break;
3614 }
3615 }
3616 }
3617
3618 if (!frame_number_valid) {
3619 LOGD("Not a valid normal frame number, used as SOF only");
3620 if (free_and_bufdone_meta_buf) {
3621 mMetadataChannel->bufDone(metadata_buf);
3622 free(metadata_buf);
3623 }
3624 goto done_metadata;
3625 }
3626 LOGH("valid frame_number = %u, capture_time = %lld",
3627 frame_number, capture_time);
3628
Emilian Peev7650c122017-01-19 08:24:33 -08003629 if (metadata->is_depth_data_valid) {
3630 handleDepthDataLocked(metadata->depth_data, frame_number);
3631 }
3632
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003633    // Check whether any stream buffer corresponding to this frame number is dropped or not.
3634    // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3635    // OR, if instant AEC is enabled, drop frames until AEC is settled.
3636 for (auto & pendingRequest : mPendingRequestsList) {
3637 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3638 mInstantAECSettledFrameNumber)) {
3639 camera3_notify_msg_t notify_msg = {};
3640 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003641 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003642 QCamera3ProcessingChannel *channel =
3643 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003644 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003645 if (p_cam_frame_drop) {
3646 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003647 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003648 // Got the stream ID for drop frame.
3649 dropFrame = true;
3650 break;
3651 }
3652 }
3653 } else {
3654 // This is instant AEC case.
3655                    // For instant AEC, drop the stream buffers until AEC is settled.
3656 dropFrame = true;
3657 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003658
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003659 if (dropFrame) {
3660 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3661 if (p_cam_frame_drop) {
3662 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003663 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003664 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003665 } else {
3666 // For instant AEC, inform frame drop and frame number
3667 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3668 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003669 pendingRequest.frame_number, streamID,
3670 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003671 }
3672 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003673 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003674 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003675 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003676 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003677 if (p_cam_frame_drop) {
3678 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003679 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003681 } else {
3682 // For instant AEC, inform frame drop and frame number
3683 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3684 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003685 pendingRequest.frame_number, streamID,
3686 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003687 }
3688 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003689 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003690 PendingFrameDrop.stream_ID = streamID;
3691 // Add the Frame drop info to mPendingFrameDropList
3692 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003693 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003694 }
3695 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003697
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003698 for (auto & pendingRequest : mPendingRequestsList) {
3699 // Find the pending request with the frame number.
3700 if (pendingRequest.frame_number == frame_number) {
3701 // Update the sensor timestamp.
3702 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003703
Thierry Strudel3d639192016-09-09 11:52:26 -07003704
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003705 /* Set the timestamp in display metadata so that clients aware of
3706               private_handle, such as VT, can use these unmodified timestamps.
3707 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003708 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003709
Thierry Strudel3d639192016-09-09 11:52:26 -07003710 // Find channel requiring metadata, meaning internal offline postprocess
3711 // is needed.
3712 //TODO: for now, we don't support two streams requiring metadata at the same time.
3713            // (because we are not making copies, and the metadata buffer is not reference counted).
3714 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003715 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3716 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003717 if (iter->need_metadata) {
3718 internalPproc = true;
3719 QCamera3ProcessingChannel *channel =
3720 (QCamera3ProcessingChannel *)iter->stream->priv;
3721 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003722 if(p_is_metabuf_queued != NULL) {
3723 *p_is_metabuf_queued = true;
3724 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 break;
3726 }
3727 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003728 for (auto itr = pendingRequest.internalRequestList.begin();
3729 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003730 if (itr->need_metadata) {
3731 internalPproc = true;
3732 QCamera3ProcessingChannel *channel =
3733 (QCamera3ProcessingChannel *)itr->stream->priv;
3734 channel->queueReprocMetadata(metadata_buf);
3735 break;
3736 }
3737 }
3738
Thierry Strudel54dc9782017-02-15 12:12:10 -08003739 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003740
3741 bool *enableZsl = nullptr;
3742 if (gExposeEnableZslKey) {
3743 enableZsl = &pendingRequest.enableZsl;
3744 }
3745
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003746 resultMetadata = translateFromHalMetadata(metadata,
3747 pendingRequest.timestamp, pendingRequest.request_id,
3748 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3749 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003750 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003751 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003752 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003753 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003754 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003755 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003756
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003757 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003758
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003759 if (pendingRequest.blob_request) {
3760 //Dump tuning metadata if enabled and available
3761 char prop[PROPERTY_VALUE_MAX];
3762 memset(prop, 0, sizeof(prop));
3763 property_get("persist.camera.dumpmetadata", prop, "0");
3764 int32_t enabled = atoi(prop);
3765 if (enabled && metadata->is_tuning_params_valid) {
3766 dumpMetadataToFile(metadata->tuning_params,
3767 mMetaFrameCount,
3768 enabled,
3769 "Snapshot",
3770 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003771 }
3772 }
3773
3774 if (!internalPproc) {
3775 LOGD("couldn't find need_metadata for this metadata");
3776 // Return metadata buffer
3777 if (free_and_bufdone_meta_buf) {
3778 mMetadataChannel->bufDone(metadata_buf);
3779 free(metadata_buf);
3780 }
3781 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003782
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003783 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003784 }
3785 }
3786
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003787 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3788
3789 // Try to send out capture result metadata.
3790 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003791 return;
3792
Thierry Strudel3d639192016-09-09 11:52:26 -07003793done_metadata:
3794 for (pendingRequestIterator i = mPendingRequestsList.begin();
3795 i != mPendingRequestsList.end() ;i++) {
3796 i->pipeline_depth++;
3797 }
3798 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3799 unblockRequestIfNecessary();
3800}
3801
3802/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003803 * FUNCTION   : handleDepthDataLocked
3804 *
3805 * DESCRIPTION: Handles incoming depth data
3806 *
3807 * PARAMETERS : @depthData : Depth data
3808 * @frameNumber: Frame number of the incoming depth data
3809 *
3810 * RETURN :
3811 *
3812 *==========================================================================*/
3813void QCamera3HardwareInterface::handleDepthDataLocked(
3814 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3815 uint32_t currentFrameNumber;
3816 buffer_handle_t *depthBuffer;
3817
3818 if (nullptr == mDepthChannel) {
3819 LOGE("Depth channel not present!");
3820 return;
3821 }
3822
3823 camera3_stream_buffer_t resultBuffer =
3824 {.acquire_fence = -1,
3825 .release_fence = -1,
3826 .status = CAMERA3_BUFFER_STATUS_OK,
3827 .buffer = nullptr,
3828 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003829 do {
3830 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3831 if (nullptr == depthBuffer) {
3832 break;
3833 }
3834
Emilian Peev7650c122017-01-19 08:24:33 -08003835 resultBuffer.buffer = depthBuffer;
3836 if (currentFrameNumber == frameNumber) {
3837 int32_t rc = mDepthChannel->populateDepthData(depthData,
3838 frameNumber);
3839 if (NO_ERROR != rc) {
3840 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3841 } else {
3842 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3843 }
3844 } else if (currentFrameNumber > frameNumber) {
3845 break;
3846 } else {
3847 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3848 {{currentFrameNumber, mDepthChannel->getStream(),
3849 CAMERA3_MSG_ERROR_BUFFER}}};
3850 orchestrateNotify(&notify_msg);
3851
3852 LOGE("Depth buffer for frame number: %d is missing "
3853 "returning back!", currentFrameNumber);
3854 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3855 }
3856 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003857 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003858 } while (currentFrameNumber < frameNumber);
3859}
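
/* Summary (descriptive, not from the original source) of the matching loop
 * above, for each queued depth buffer with frame number F relative to the
 * incoming frameNumber N:
 *   F <  N : depth data for F never arrived; a CAMERA3_MSG_ERROR_BUFFER
 *            notify is sent and the buffer is returned with
 *            CAMERA3_BUFFER_STATUS_ERROR.
 *   F == N : populateDepthData() fills the buffer and it is returned with
 *            CAMERA3_BUFFER_STATUS_OK (or STATUS_ERROR if population fails).
 *   F >  N : the buffer belongs to a future request and stays queued.
 */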
3860
3861/*===========================================================================
3862 * FUNCTION : notifyErrorFoPendingDepthData
3863 *
3864 * DESCRIPTION: Returns error for any pending depth buffers
3865 *
3866 * PARAMETERS : depthCh - depth channel that needs to get flushed
3867 *
3868 * RETURN :
3869 *
3870 *==========================================================================*/
3871void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3872 QCamera3DepthChannel *depthCh) {
3873 uint32_t currentFrameNumber;
3874 buffer_handle_t *depthBuffer;
3875
3876 if (nullptr == depthCh) {
3877 return;
3878 }
3879
3880 camera3_notify_msg_t notify_msg =
3881 {.type = CAMERA3_MSG_ERROR,
3882 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3883 camera3_stream_buffer_t resultBuffer =
3884 {.acquire_fence = -1,
3885 .release_fence = -1,
3886 .buffer = nullptr,
3887 .stream = depthCh->getStream(),
3888 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003889
3890 while (nullptr !=
3891 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3892 depthCh->unmapBuffer(currentFrameNumber);
3893
3894 notify_msg.message.error.frame_number = currentFrameNumber;
3895 orchestrateNotify(&notify_msg);
3896
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003897 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003898 };
3899}
3900
3901/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 * FUNCTION : hdrPlusPerfLock
3903 *
3904 * DESCRIPTION: perf lock for HDR+ using custom intent
3905 *
3906 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3907 *
3908 * RETURN : None
3909 *
3910 *==========================================================================*/
3911void QCamera3HardwareInterface::hdrPlusPerfLock(
3912 mm_camera_super_buf_t *metadata_buf)
3913{
3914 if (NULL == metadata_buf) {
3915 LOGE("metadata_buf is NULL");
3916 return;
3917 }
3918 metadata_buffer_t *metadata =
3919 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3920 int32_t *p_frame_number_valid =
3921 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3922 uint32_t *p_frame_number =
3923 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3924
3925 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3926 LOGE("%s: Invalid metadata", __func__);
3927 return;
3928 }
3929
3930 //acquire perf lock for 5 sec after the last HDR frame is captured
3931     if (*p_frame_number_valid &&
3932             (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003934         mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003935     }
Thierry Strudel3d639192016-09-09 11:52:26 -07003937}
3938
3939/*===========================================================================
3940 * FUNCTION : handleInputBufferWithLock
3941 *
3942 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3943 *
3944 * PARAMETERS : @frame_number: frame number of the input buffer
3945 *
3946 * RETURN :
3947 *
3948 *==========================================================================*/
3949void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3950{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003951 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003952 pendingRequestIterator i = mPendingRequestsList.begin();
3953 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3954 i++;
3955 }
3956 if (i != mPendingRequestsList.end() && i->input_buffer) {
3957 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003958 CameraMetadata settings;
3959 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3960 if(i->settings) {
3961 settings = i->settings;
3962 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3963 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07003964 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003965 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07003966 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003967 } else {
3968 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07003969 }
3970
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003971 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3972 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3973 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07003974
3975 camera3_capture_result result;
3976 memset(&result, 0, sizeof(camera3_capture_result));
3977 result.frame_number = frame_number;
3978 result.result = i->settings;
3979 result.input_buffer = i->input_buffer;
3980 result.partial_result = PARTIAL_RESULT_COUNT;
3981
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003982 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003983 LOGD("Input request metadata and input buffer frame_number = %u",
3984 i->frame_number);
3985 i = erasePendingRequest(i);
3986 } else {
3987 LOGE("Could not find input request for frame number %d", frame_number);
3988 }
3989}
3990
3991/*===========================================================================
3992 * FUNCTION : handleBufferWithLock
3993 *
3994 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3995 *
3996 * PARAMETERS : @buffer: image buffer for the callback
3997 * @frame_number: frame number of the image buffer
3998 *
3999 * RETURN :
4000 *
4001 *==========================================================================*/
4002void QCamera3HardwareInterface::handleBufferWithLock(
4003 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4004{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004005 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004006
4007 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4008 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4009 }
4010
Thierry Strudel3d639192016-09-09 11:52:26 -07004011 /* Nothing to be done during error state */
4012 if ((ERROR == mState) || (DEINIT == mState)) {
4013 return;
4014 }
4015 if (mFlushPerf) {
4016 handleBuffersDuringFlushLock(buffer);
4017 return;
4018 }
4019 //not in flush
4020 // If the frame number doesn't exist in the pending request list,
4021 // directly send the buffer to the frameworks, and update pending buffers map
4022 // Otherwise, book-keep the buffer.
4023 pendingRequestIterator i = mPendingRequestsList.begin();
4024 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4025 i++;
4026 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004027
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004028 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004029 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004030 // For a reprocessing request, try to send out result metadata.
4031 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004033 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004034
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004035 // Check if this frame was dropped.
4036 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4037 m != mPendingFrameDropList.end(); m++) {
4038 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4039 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4040 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4041 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4042 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4043 frame_number, streamID);
4044 m = mPendingFrameDropList.erase(m);
4045 break;
4046 }
4047 }
4048
4049 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4050 LOGH("result frame_number = %d, buffer = %p",
4051 frame_number, buffer->buffer);
4052
4053 mPendingBuffersMap.removeBuf(buffer->buffer);
4054 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4055
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004056 if (mPreviewStarted == false) {
4057 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4058 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004059 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4060
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004061 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4062 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4063 mPreviewStarted = true;
4064
4065 // Set power hint for preview
4066 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4067 }
4068 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004069}
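
/* Note on the buffer path above (descriptive only, no behavioral change
 * implied): a frame that was flagged in mPendingFrameDropList for this stream
 * is still returned to the framework, but with CAMERA3_BUFFER_STATUS_ERROR so
 * the client knows the contents are invalid. In every case the buffer is
 * removed from mPendingBuffersMap and handed to mOutputBufferDispatcher, which
 * performs the actual delivery back to the framework.
 */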
4070
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004071void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004072 const camera_metadata_t *resultMetadata)
4073{
4074 // Find the pending request for this result metadata.
4075 auto requestIter = mPendingRequestsList.begin();
4076 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4077 requestIter++;
4078 }
4079
4080 if (requestIter == mPendingRequestsList.end()) {
4081 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4082 return;
4083 }
4084
4085 // Update the result metadata
4086 requestIter->resultMetadata = resultMetadata;
4087
4088 // Check what type of request this is.
4089 bool liveRequest = false;
4090 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004091 // HDR+ request doesn't have partial results.
4092 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004093 } else if (requestIter->input_buffer != nullptr) {
4094 // Reprocessing request result is the same as settings.
4095 requestIter->resultMetadata = requestIter->settings;
4096 // Reprocessing request doesn't have partial results.
4097 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4098 } else {
4099 liveRequest = true;
4100 requestIter->partial_result_cnt++;
4101 mPendingLiveRequest--;
4102
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004103 {
4104 Mutex::Autolock l(gHdrPlusClientLock);
4105 // For a live request, send the metadata to HDR+ client.
4106 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4107 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4108 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4109 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004110 }
4111 }
4112
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004113 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4114 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004115 bool readyToSend = true;
4116
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004117 // Iterate through the pending requests to send out result metadata that are ready. Also if
4118 // this result metadata belongs to a live request, notify errors for previous live requests
4119 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004120 auto iter = mPendingRequestsList.begin();
4121 while (iter != mPendingRequestsList.end()) {
4122 // Check if current pending request is ready. If it's not ready, the following pending
4123 // requests are also not ready.
4124 if (readyToSend && iter->resultMetadata == nullptr) {
4125 readyToSend = false;
4126 }
4127
4128 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4129
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004130 camera3_capture_result_t result = {};
4131 result.frame_number = iter->frame_number;
4132 result.result = iter->resultMetadata;
4133 result.partial_result = iter->partial_result_cnt;
4134
4135 // If this pending buffer has result metadata, we may be able to send out shutter callback
4136 // and result metadata.
4137 if (iter->resultMetadata != nullptr) {
4138 if (!readyToSend) {
4139 // If any of the previous pending request is not ready, this pending request is
4140 // also not ready to send in order to keep shutter callbacks and result metadata
4141 // in order.
4142 iter++;
4143 continue;
4144 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004145 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4146 // If the result metadata belongs to a live request, notify errors for previous pending
4147 // live requests.
4148 mPendingLiveRequest--;
4149
4150 CameraMetadata dummyMetadata;
4151 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4152 result.result = dummyMetadata.release();
4153
4154 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004155
4156             // partial_result should be PARTIAL_RESULT_COUNT in case of
4157 // ERROR_RESULT.
4158 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4159 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004160 } else {
4161 iter++;
4162 continue;
4163 }
4164
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004165 result.output_buffers = nullptr;
4166 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004167 orchestrateResult(&result);
4168
4169 // For reprocessing, result metadata is the same as settings so do not free it here to
4170 // avoid double free.
4171 if (result.result != iter->settings) {
4172 free_camera_metadata((camera_metadata_t *)result.result);
4173 }
4174 iter->resultMetadata = nullptr;
4175 iter = erasePendingRequest(iter);
4176 }
4177
4178 if (liveRequest) {
4179 for (auto &iter : mPendingRequestsList) {
4180 // Increment pipeline depth for the following pending requests.
4181 if (iter.frame_number > frameNumber) {
4182 iter.pipeline_depth++;
4183 }
4184 }
4185 }
4186
4187 unblockRequestIfNecessary();
4188}
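
/* Illustrative walk-through of the ordering rule above (hypothetical frame
 * numbers, for documentation only):
 *
 *   mPendingRequestsList: [100, 101, 102]   // always ordered by frame number
 *
 *   1. Metadata for 102 arrives first: readyToSend becomes false at entry 100,
 *      so 102 is held back even though its metadata is present.
 *   2. Metadata for 100 arrives: 100 is sent; 101 still blocks 102.
 *   3. Metadata for 101 arrives: 101 and then 102 go out in the same pass of
 *      the loop, keeping shutters and results strictly in frame-number order.
 *
 * If the arriving metadata belongs to a live request and older live requests
 * still have no metadata, those older requests are instead completed with
 * CAMERA3_MSG_ERROR_RESULT and a dummy result, as coded above.
 */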
4189
Thierry Strudel3d639192016-09-09 11:52:26 -07004190/*===========================================================================
4191 * FUNCTION : unblockRequestIfNecessary
4192 *
4193 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4194 * that mMutex is held when this function is called.
4195 *
4196 * PARAMETERS :
4197 *
4198 * RETURN :
4199 *
4200 *==========================================================================*/
4201void QCamera3HardwareInterface::unblockRequestIfNecessary()
4202{
4203 // Unblock process_capture_request
4204 pthread_cond_signal(&mRequestCond);
4205}
4206
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004207/*===========================================================================
4208 * FUNCTION : isHdrSnapshotRequest
4209 *
4210 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4211 *
4212 * PARAMETERS : camera3 request structure
4213 *
4214 * RETURN : boolean decision variable
4215 *
4216 *==========================================================================*/
4217bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4218{
4219 if (request == NULL) {
4220 LOGE("Invalid request handle");
4221 assert(0);
4222 return false;
4223 }
4224
4225 if (!mForceHdrSnapshot) {
4226 CameraMetadata frame_settings;
4227 frame_settings = request->settings;
4228
4229 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4230 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4231 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4232 return false;
4233 }
4234 } else {
4235 return false;
4236 }
4237
4238 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4239 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4240 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4241 return false;
4242 }
4243 } else {
4244 return false;
4245 }
4246 }
4247
4248 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4249 if (request->output_buffers[i].stream->format
4250 == HAL_PIXEL_FORMAT_BLOB) {
4251 return true;
4252 }
4253 }
4254
4255 return false;
4256}
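
/* Minimal sketch of a request that satisfies the checks above (hypothetical
 * framework-side settings, shown only for illustration):
 *
 *   CameraMetadata settings;
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   // ...attached to a capture request that also contains at least one
 *   // HAL_PIXEL_FORMAT_BLOB output buffer.
 *
 * With mForceHdrSnapshot enabled (configured elsewhere in this HAL), the two
 * scene-mode checks are skipped, but a BLOB output buffer is still required.
 */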
4257/*===========================================================================
4258 * FUNCTION : orchestrateRequest
4259 *
4260 * DESCRIPTION: Orchestrates a capture request from camera service
4261 *
4262 * PARAMETERS :
4263 * @request : request from framework to process
4264 *
4265 * RETURN : Error status codes
4266 *
4267 *==========================================================================*/
4268int32_t QCamera3HardwareInterface::orchestrateRequest(
4269 camera3_capture_request_t *request)
4270{
4271
4272 uint32_t originalFrameNumber = request->frame_number;
4273 uint32_t originalOutputCount = request->num_output_buffers;
4274 const camera_metadata_t *original_settings = request->settings;
4275 List<InternalRequest> internallyRequestedStreams;
4276 List<InternalRequest> emptyInternalList;
4277
4278 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4279 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4280 uint32_t internalFrameNumber;
4281 CameraMetadata modified_meta;
4282
4283
4284 /* Add Blob channel to list of internally requested streams */
4285 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4286 if (request->output_buffers[i].stream->format
4287 == HAL_PIXEL_FORMAT_BLOB) {
4288 InternalRequest streamRequested;
4289 streamRequested.meteringOnly = 1;
4290 streamRequested.need_metadata = 0;
4291 streamRequested.stream = request->output_buffers[i].stream;
4292 internallyRequestedStreams.push_back(streamRequested);
4293 }
4294 }
4295 request->num_output_buffers = 0;
4296 auto itr = internallyRequestedStreams.begin();
4297
4298 /* Modify setting to set compensation */
4299 modified_meta = request->settings;
4300 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4301 uint8_t aeLock = 1;
4302 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4303 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4304 camera_metadata_t *modified_settings = modified_meta.release();
4305 request->settings = modified_settings;
4306
4307 /* Capture Settling & -2x frame */
4308 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4309 request->frame_number = internalFrameNumber;
4310 processCaptureRequest(request, internallyRequestedStreams);
4311
4312 request->num_output_buffers = originalOutputCount;
4313 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4314 request->frame_number = internalFrameNumber;
4315 processCaptureRequest(request, emptyInternalList);
4316 request->num_output_buffers = 0;
4317
4318 modified_meta = modified_settings;
4319 expCompensation = 0;
4320 aeLock = 1;
4321 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4322 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4323 modified_settings = modified_meta.release();
4324 request->settings = modified_settings;
4325
4326 /* Capture Settling & 0X frame */
4327
4328 itr = internallyRequestedStreams.begin();
4329 if (itr == internallyRequestedStreams.end()) {
4330 LOGE("Error Internally Requested Stream list is empty");
4331 assert(0);
4332 } else {
4333 itr->need_metadata = 0;
4334 itr->meteringOnly = 1;
4335 }
4336
4337 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4338 request->frame_number = internalFrameNumber;
4339 processCaptureRequest(request, internallyRequestedStreams);
4340
4341 itr = internallyRequestedStreams.begin();
4342 if (itr == internallyRequestedStreams.end()) {
4343 ALOGE("Error Internally Requested Stream list is empty");
4344 assert(0);
4345 } else {
4346 itr->need_metadata = 1;
4347 itr->meteringOnly = 0;
4348 }
4349
4350 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4351 request->frame_number = internalFrameNumber;
4352 processCaptureRequest(request, internallyRequestedStreams);
4353
4354 /* Capture 2X frame*/
4355 modified_meta = modified_settings;
4356 expCompensation = GB_HDR_2X_STEP_EV;
4357 aeLock = 1;
4358 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4359 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4360 modified_settings = modified_meta.release();
4361 request->settings = modified_settings;
4362
4363 itr = internallyRequestedStreams.begin();
4364 if (itr == internallyRequestedStreams.end()) {
4365 ALOGE("Error Internally Requested Stream list is empty");
4366 assert(0);
4367 } else {
4368 itr->need_metadata = 0;
4369 itr->meteringOnly = 1;
4370 }
4371 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4372 request->frame_number = internalFrameNumber;
4373 processCaptureRequest(request, internallyRequestedStreams);
4374
4375 itr = internallyRequestedStreams.begin();
4376 if (itr == internallyRequestedStreams.end()) {
4377 ALOGE("Error Internally Requested Stream list is empty");
4378 assert(0);
4379 } else {
4380 itr->need_metadata = 1;
4381 itr->meteringOnly = 0;
4382 }
4383
4384 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4385 request->frame_number = internalFrameNumber;
4386 processCaptureRequest(request, internallyRequestedStreams);
4387
4388
4389 /* Capture 2X on original streaming config*/
4390 internallyRequestedStreams.clear();
4391
4392 /* Restore original settings pointer */
4393 request->settings = original_settings;
4394 } else {
4395 uint32_t internalFrameNumber;
4396 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4397 request->frame_number = internalFrameNumber;
4398 return processCaptureRequest(request, internallyRequestedStreams);
4399 }
4400
4401 return NO_ERROR;
4402}
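
/* Summary of the HDR snapshot expansion performed above (descriptive only):
 *
 *   one framework request  ->  six internal requests, in order:
 *     1. metering-only settling frame, AE compensation = GB_HDR_HALF_STEP_EV
 *     2. the framework-mapped request with the original output buffers, same
 *        AE compensation
 *     3. metering-only settling frame, AE compensation = 0
 *     4. internal blob capture with metadata, AE compensation = 0
 *     5. metering-only settling frame, AE compensation = GB_HDR_2X_STEP_EV
 *     6. internal blob capture with metadata, AE compensation = GB_HDR_2X_STEP_EV
 *
 * AE is locked for the whole sequence. Only request 2 is registered against
 * the framework frame number via allocStoreInternalFrameNumber(); the others
 * map to EMPTY_FRAMEWORK_FRAME_NUMBER, so their results and notifies are
 * dropped in orchestrateResult() / orchestrateNotify().
 */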
4403
4404/*===========================================================================
4405 * FUNCTION : orchestrateResult
4406 *
4407 * DESCRIPTION: Orchestrates a capture result to camera service
4408 *
4409 * PARAMETERS :
4410 *   @result : capture result to be delivered to camera service
4411 *
4412 * RETURN :
4413 *
4414 *==========================================================================*/
4415void QCamera3HardwareInterface::orchestrateResult(
4416 camera3_capture_result_t *result)
4417{
4418 uint32_t frameworkFrameNumber;
4419 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4420 frameworkFrameNumber);
4421 if (rc != NO_ERROR) {
4422 LOGE("Cannot find translated frameworkFrameNumber");
4423 assert(0);
4424 } else {
4425 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004426 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004427 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004428 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004429 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4430 camera_metadata_entry_t entry;
4431 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4432 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004433 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004434 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4435 if (ret != OK)
4436 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004437 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004438 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004439 result->frame_number = frameworkFrameNumber;
4440 mCallbackOps->process_capture_result(mCallbackOps, result);
4441 }
4442 }
4443}
4444
4445/*===========================================================================
4446 * FUNCTION : orchestrateNotify
4447 *
4448 * DESCRIPTION: Orchestrates a notify to camera service
4449 *
4450 * PARAMETERS :
4451 *   @notify_msg : notify message to be delivered to camera service
4452 *
4453 * RETURN :
4454 *
4455 *==========================================================================*/
4456void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4457{
4458 uint32_t frameworkFrameNumber;
4459 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004460 int32_t rc = NO_ERROR;
4461
4462 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004463 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004464
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004465 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004466 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4467 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4468 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004469 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004470 LOGE("Cannot find translated frameworkFrameNumber");
4471 assert(0);
4472 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004473 }
4474 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004475
4476 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4477 LOGD("Internal Request drop the notifyCb");
4478 } else {
4479 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4480 mCallbackOps->notify(mCallbackOps, notify_msg);
4481 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004482}
4483
4484/*===========================================================================
4485 * FUNCTION : FrameNumberRegistry
4486 *
4487 * DESCRIPTION: Constructor
4488 *
4489 * PARAMETERS :
4490 *
4491 * RETURN :
4492 *
4493 *==========================================================================*/
4494FrameNumberRegistry::FrameNumberRegistry()
4495{
4496 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4497}
4498
4499/*===========================================================================
4500 * FUNCTION : ~FrameNumberRegistry
4501 *
4502 * DESCRIPTION: Destructor
4503 *
4504 * PARAMETERS :
4505 *
4506 * RETURN :
4507 *
4508 *==========================================================================*/
4509FrameNumberRegistry::~FrameNumberRegistry()
4510{
4511}
4512
4513/*===========================================================================
4514 * FUNCTION : PurgeOldEntriesLocked
4515 *
4516 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4517 *
4518 * PARAMETERS :
4519 *
4520 * RETURN : NONE
4521 *
4522 *==========================================================================*/
4523void FrameNumberRegistry::purgeOldEntriesLocked()
4524{
4525 while (_register.begin() != _register.end()) {
4526 auto itr = _register.begin();
4527 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4528 _register.erase(itr);
4529 } else {
4530 return;
4531 }
4532 }
4533}
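
/* Example of the purge window (FRAME_REGISTER_LRU_SIZE is defined elsewhere;
 * the value here is hypothetical): with a window of 256 and
 * _nextFreeInternalNumber == 1000, every entry whose internal number is below
 * 744 is erased, so only the most recent mappings stay resident.
 */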
4534
4535/*===========================================================================
4536 * FUNCTION : allocStoreInternalFrameNumber
4537 *
4538 * DESCRIPTION: Method to note down a framework request and associate a new
4539 * DESCRIPTION: Method to record a framework request and associate a newly
4540 *              generated internal frame number with it
4541 * PARAMETERS :
4542 * @frameworkFrameNumber: Frame number given by the framework
4543 * @internalFrameNumber : Output parameter which will have the newly generated
4544 *                        internal frame number
4545 *
4546 * RETURN : Error code
4547 *
4548 *==========================================================================*/
4549int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4550 uint32_t &internalFrameNumber)
4551{
4552 Mutex::Autolock lock(mRegistryLock);
4553 internalFrameNumber = _nextFreeInternalNumber++;
4554 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4555 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4556 purgeOldEntriesLocked();
4557 return NO_ERROR;
4558}
4559
4560/*===========================================================================
4561 * FUNCTION : generateStoreInternalFrameNumber
4562 *
4563 * DESCRIPTION: Method to generate and store a new internal frame number that
4564 *              is not associated with any framework request
4565 *
4566 * PARAMETERS :
4567 * @internalFrameNumber: Output parameter which will have the newly generated
4568 *                       internal frame number
4569 *
4570 * RETURN : Error code
4571 *
4572 *==========================================================================*/
4573int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4574{
4575 Mutex::Autolock lock(mRegistryLock);
4576 internalFrameNumber = _nextFreeInternalNumber++;
4577 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4578 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4579 purgeOldEntriesLocked();
4580 return NO_ERROR;
4581}
4582
4583/*===========================================================================
4584 * FUNCTION : getFrameworkFrameNumber
4585 *
4586 * DESCRIPTION: Method to query the framework frame number given an internal one
4587 *
4588 * PARAMETERS :
4589 * @internalFrameNumber : Internal frame number reference
4590 * @frameworkFrameNumber: Output parameter holding the framework frame number
4591 *
4592 * RETURN : Error code
4593 *
4594 *==========================================================================*/
4595int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4596 uint32_t &frameworkFrameNumber)
4597{
4598 Mutex::Autolock lock(mRegistryLock);
4599 auto itr = _register.find(internalFrameNumber);
4600 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004601 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004602 return -ENOENT;
4603 }
4604
4605 frameworkFrameNumber = itr->second;
4606 purgeOldEntriesLocked();
4607 return NO_ERROR;
4608}
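
/* Illustrative round-trip through the registry (hypothetical values, not part
 * of the HAL build):
 *
 *   FrameNumberRegistry reg;
 *   uint32_t internalFN, fwkFN;
 *
 *   reg.allocStoreInternalFrameNumber(12, internalFN);  // framework frame 12
 *   reg.getFrameworkFrameNumber(internalFN, fwkFN);     // fwkFN == 12
 *
 *   reg.generateStoreInternalFrameNumber(internalFN);   // HAL-internal only
 *   reg.getFrameworkFrameNumber(internalFN, fwkFN);     // fwkFN ==
 *                                             // EMPTY_FRAMEWORK_FRAME_NUMBER
 */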
Thierry Strudel3d639192016-09-09 11:52:26 -07004609
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004610status_t QCamera3HardwareInterface::fillPbStreamConfig(
4611 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4612 QCamera3Channel *channel, uint32_t streamIndex) {
4613 if (config == nullptr) {
4614 LOGE("%s: config is null", __FUNCTION__);
4615 return BAD_VALUE;
4616 }
4617
4618 if (channel == nullptr) {
4619 LOGE("%s: channel is null", __FUNCTION__);
4620 return BAD_VALUE;
4621 }
4622
4623 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4624 if (stream == nullptr) {
4625 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4626 return NAME_NOT_FOUND;
4627 }
4628
4629 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4630 if (streamInfo == nullptr) {
4631 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4632 return NAME_NOT_FOUND;
4633 }
4634
4635 config->id = pbStreamId;
4636 config->image.width = streamInfo->dim.width;
4637 config->image.height = streamInfo->dim.height;
4638 config->image.padding = 0;
4639 config->image.format = pbStreamFormat;
4640
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004641 uint32_t totalPlaneSize = 0;
4642
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004643 // Fill plane information.
4644 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4645 pbcamera::PlaneConfiguration plane;
4646 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4647 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4648 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004649
4650 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004651 }
4652
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004653 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004654 return OK;
4655}
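
/* Worked example of the padding computation above (hypothetical plane numbers,
 * for illustration only): a 4032x3024 NV21 stream with a Y plane of
 * stride 4032 / scanline 3072 and a CbCr plane of stride 4032 / scanline 1536
 * gives totalPlaneSize = 4032*3072 + 4032*1536 = 18579456. If the backend
 * reports frame_len = 18874368 for the aligned allocation, then
 * config->image.padding = 18874368 - 18579456 = 294912 bytes.
 */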
4656
Thierry Strudel3d639192016-09-09 11:52:26 -07004657/*===========================================================================
4658 * FUNCTION : processCaptureRequest
4659 *
4660 * DESCRIPTION: process a capture request from camera service
4661 *
4662 * PARAMETERS :
4663 * @request : request from framework to process
4664 *
4665 * RETURN :
4666 *
4667 *==========================================================================*/
4668int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004669 camera3_capture_request_t *request,
4670 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004671{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004672 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004673 int rc = NO_ERROR;
4674 int32_t request_id;
4675 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004676 bool isVidBufRequested = false;
4677 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004678 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004679
4680 pthread_mutex_lock(&mMutex);
4681
4682 // Validate current state
4683 switch (mState) {
4684 case CONFIGURED:
4685 case STARTED:
4686 /* valid state */
4687 break;
4688
4689 case ERROR:
4690 pthread_mutex_unlock(&mMutex);
4691 handleCameraDeviceError();
4692 return -ENODEV;
4693
4694 default:
4695 LOGE("Invalid state %d", mState);
4696 pthread_mutex_unlock(&mMutex);
4697 return -ENODEV;
4698 }
4699
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004700 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004701 if (rc != NO_ERROR) {
4702 LOGE("incoming request is not valid");
4703 pthread_mutex_unlock(&mMutex);
4704 return rc;
4705 }
4706
4707 meta = request->settings;
4708
4709 // For first capture request, send capture intent, and
4710 // stream on all streams
4711 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004712 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004713 // send an unconfigure to the backend so that the isp
4714 // resources are deallocated
4715 if (!mFirstConfiguration) {
4716 cam_stream_size_info_t stream_config_info;
4717 int32_t hal_version = CAM_HAL_V3;
4718 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4719 stream_config_info.buffer_info.min_buffers =
4720 MIN_INFLIGHT_REQUESTS;
4721 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004722 m_bIs4KVideo ? 0 :
4723 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004724 clear_metadata_buffer(mParameters);
4725 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4726 CAM_INTF_PARM_HAL_VERSION, hal_version);
4727 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4728 CAM_INTF_META_STREAM_INFO, stream_config_info);
4729 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4730 mParameters);
4731 if (rc < 0) {
4732 LOGE("set_parms for unconfigure failed");
4733 pthread_mutex_unlock(&mMutex);
4734 return rc;
4735 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004736
Thierry Strudel3d639192016-09-09 11:52:26 -07004737 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004738 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004739 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004740 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004741 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004742 property_get("persist.camera.is_type", is_type_value, "4");
4743 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4744 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4745 property_get("persist.camera.is_type_preview", is_type_value, "4");
4746 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4747 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004748
4749 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4750 int32_t hal_version = CAM_HAL_V3;
4751 uint8_t captureIntent =
4752 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4753 mCaptureIntent = captureIntent;
4754 clear_metadata_buffer(mParameters);
4755 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4756 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4757 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004758 if (mFirstConfiguration) {
4759 // configure instant AEC
4760 // Instant AEC is a session based parameter and it is needed only
4761 // once per complete session after open camera.
4762 // i.e. This is set only once for the first capture request, after open camera.
4763 setInstantAEC(meta);
4764 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004765 uint8_t fwkVideoStabMode=0;
4766 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4767 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4768 }
4769
Xue Tuecac74e2017-04-17 13:58:15 -07004770 // If EIS setprop is enabled then only turn it on for video/preview
4771 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004772 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 int32_t vsMode;
4774 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4776 rc = BAD_VALUE;
4777 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004778 LOGD("setEis %d", setEis);
4779 bool eis3Supported = false;
4780 size_t count = IS_TYPE_MAX;
4781 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4782 for (size_t i = 0; i < count; i++) {
4783 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4784 eis3Supported = true;
4785 break;
4786 }
4787 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004788
4789         //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004790         //it could either be IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4792 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004793 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4794 is_type = isTypePreview;
4795 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4796 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4797 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004799 } else {
4800 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004801 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004802 } else {
4803 is_type = IS_TYPE_NONE;
4804 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004806 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004807 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4808 }
4809 }
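        // At this point mStreamConfigInfo.is_type[] is fully resolved: preview
        // streams carry isTypePreview, video streams carry isTypeVideo (dropped
        // to IS_TYPE_EIS_2_0 when EIS 3.0 is not supported), and all remaining
        // streams, or every stream when setEis is false, end up as IS_TYPE_NONE.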
4810
4811 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4812 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4813
Thierry Strudel54dc9782017-02-15 12:12:10 -08004814 //Disable tintless only if the property is set to 0
4815 memset(prop, 0, sizeof(prop));
4816 property_get("persist.camera.tintless.enable", prop, "1");
4817 int32_t tintless_value = atoi(prop);
4818
Thierry Strudel3d639192016-09-09 11:52:26 -07004819 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4820 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004821
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 //Disable CDS for HFR mode or if DIS/EIS is on.
4823 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4824 //after every configure_stream
4825 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4826 (m_bIsVideo)) {
4827 int32_t cds = CAM_CDS_MODE_OFF;
4828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4829 CAM_INTF_PARM_CDS_MODE, cds))
4830 LOGE("Failed to disable CDS for HFR mode");
4831
4832 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004833
4834 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4835 uint8_t* use_av_timer = NULL;
4836
4837 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004838 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004839 use_av_timer = &m_debug_avtimer;
4840 }
4841 else{
4842 use_av_timer =
4843 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004844 if (use_av_timer) {
4845 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4846 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004847 }
4848
4849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4850 rc = BAD_VALUE;
4851 }
4852 }
4853
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 setMobicat();
4855
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004856 uint8_t nrMode = 0;
4857 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4858 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4859 }
4860
Thierry Strudel3d639192016-09-09 11:52:26 -07004861 /* Set fps and hfr mode while sending meta stream info so that sensor
4862 * can configure appropriate streaming mode */
4863 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4865 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4867 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004868 if (rc == NO_ERROR) {
4869 int32_t max_fps =
4870 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004871 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004872 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4873 }
4874 /* For HFR, more buffers are dequeued upfront to improve the performance */
4875 if (mBatchSize) {
4876 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4877 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4878 }
4879 }
4880 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004881 LOGE("setHalFpsRange failed");
4882 }
4883 }
4884 if (meta.exists(ANDROID_CONTROL_MODE)) {
4885 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4886 rc = extractSceneMode(meta, metaMode, mParameters);
4887 if (rc != NO_ERROR) {
4888 LOGE("extractSceneMode failed");
4889 }
4890 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004891 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004892
Thierry Strudel04e026f2016-10-10 11:27:36 -07004893 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4894 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4895 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4896 rc = setVideoHdrMode(mParameters, vhdr);
4897 if (rc != NO_ERROR) {
4898 LOGE("setVideoHDR is failed");
4899 }
4900 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004901
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004902 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004903 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004904 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004905 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4907 sensorModeFullFov)) {
4908 rc = BAD_VALUE;
4909 }
4910 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 //TODO: validate the arguments, HSV scenemode should have only the
4912 //advertised fps ranges
4913
4914 /*set the capture intent, hal version, tintless, stream info,
4915          *and DIS enable parameters to the backend*/
4916 LOGD("set_parms META_STREAM_INFO " );
4917 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004918 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4919 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 mStreamConfigInfo.type[i],
4921 mStreamConfigInfo.stream_sizes[i].width,
4922 mStreamConfigInfo.stream_sizes[i].height,
4923 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004924 mStreamConfigInfo.format[i],
4925 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004926 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004927
Thierry Strudel3d639192016-09-09 11:52:26 -07004928 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4929 mParameters);
4930 if (rc < 0) {
4931 LOGE("set_parms failed for hal version, stream info");
4932 }
4933
Chien-Yu Chenee335912017-02-09 17:53:20 -08004934 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4935 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004936 if (rc != NO_ERROR) {
4937 LOGE("Failed to get sensor output size");
4938 pthread_mutex_unlock(&mMutex);
4939 goto error_exit;
4940 }
4941
4942 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4943 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004944 mSensorModeInfo.active_array_size.width,
4945 mSensorModeInfo.active_array_size.height);
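
        // The crop region mapper is used to translate framework coordinates
        // (such as the crop region), which are expressed in full active-array
        // space, into the coordinate space of the selected sensor mode, which
        // may be binned or cropped relative to the active array.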
Thierry Strudel3d639192016-09-09 11:52:26 -07004946
4947 /* Set batchmode before initializing channel. Since registerBuffer
4948 * internally initializes some of the channels, better set batchmode
4949 * even before first register buffer */
4950 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4951 it != mStreamInfo.end(); it++) {
4952 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4953 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4954 && mBatchSize) {
4955 rc = channel->setBatchSize(mBatchSize);
4956 //Disable per frame map unmap for HFR/batchmode case
4957 rc |= channel->setPerFrameMapUnmap(false);
4958 if (NO_ERROR != rc) {
4959 LOGE("Channel init failed %d", rc);
4960 pthread_mutex_unlock(&mMutex);
4961 goto error_exit;
4962 }
4963 }
4964 }
4965
4966 //First initialize all streams
4967 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4968 it != mStreamInfo.end(); it++) {
4969 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004970
4971 /* Initial value of NR mode is needed before stream on */
4972 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07004973 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4974 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004975 setEis) {
4976 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4977 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4978 is_type = mStreamConfigInfo.is_type[i];
4979 break;
4980 }
4981 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004982 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004984 rc = channel->initialize(IS_TYPE_NONE);
4985 }
4986 if (NO_ERROR != rc) {
4987 LOGE("Channel initialization failed %d", rc);
4988 pthread_mutex_unlock(&mMutex);
4989 goto error_exit;
4990 }
4991 }
4992
4993 if (mRawDumpChannel) {
4994 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4995 if (rc != NO_ERROR) {
4996 LOGE("Error: Raw Dump Channel init failed");
4997 pthread_mutex_unlock(&mMutex);
4998 goto error_exit;
4999 }
5000 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005001 if (mHdrPlusRawSrcChannel) {
5002 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5003 if (rc != NO_ERROR) {
5004 LOGE("Error: HDR+ RAW Source Channel init failed");
5005 pthread_mutex_unlock(&mMutex);
5006 goto error_exit;
5007 }
5008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 if (mSupportChannel) {
5010 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5011 if (rc < 0) {
5012 LOGE("Support channel initialization failed");
5013 pthread_mutex_unlock(&mMutex);
5014 goto error_exit;
5015 }
5016 }
5017 if (mAnalysisChannel) {
5018 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5019 if (rc < 0) {
5020 LOGE("Analysis channel initialization failed");
5021 pthread_mutex_unlock(&mMutex);
5022 goto error_exit;
5023 }
5024 }
5025 if (mDummyBatchChannel) {
5026 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5027 if (rc < 0) {
5028 LOGE("mDummyBatchChannel setBatchSize failed");
5029 pthread_mutex_unlock(&mMutex);
5030 goto error_exit;
5031 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 if (rc < 0) {
5034 LOGE("mDummyBatchChannel initialization failed");
5035 pthread_mutex_unlock(&mMutex);
5036 goto error_exit;
5037 }
5038 }
5039
5040 // Set bundle info
5041 rc = setBundleInfo();
5042 if (rc < 0) {
5043 LOGE("setBundleInfo failed %d", rc);
5044 pthread_mutex_unlock(&mMutex);
5045 goto error_exit;
5046 }
5047
5048 //update settings from app here
5049 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5050 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5051 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5052 }
5053 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5054 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5055 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5056 }
5057 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5058 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5059 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5060
5061 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5062 (mLinkedCameraId != mCameraId) ) {
5063 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5064 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005066 goto error_exit;
5067 }
5068 }
5069
5070 // add bundle related cameras
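        // Descriptive note: when QCAMERA3_DUALCAM_LINK_ENABLE is set, the block
        // below fills m_pDualCamCmdPtr->bundle_info so the backend can bundle
        // the two linked sensors: this camera is marked
        // CAM_MODE_PRIMARY/CAM_TYPE_MAIN when it is the main camera and
        // CAM_MODE_SECONDARY/CAM_TYPE_AUX otherwise, and in both cases
        // related_sensor_session_id points at the peer's backend session so
        // that 3A can be synchronized (CAM_3A_SYNC_FOLLOW).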
5071 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5072 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005073 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5074 &m_pDualCamCmdPtr->bundle_info;
5075 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 if (mIsDeviceLinked)
5077 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5078 else
5079 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5080
5081 pthread_mutex_lock(&gCamLock);
5082
5083 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5084 LOGE("Dualcam: Invalid Session Id ");
5085 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005086 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005087 goto error_exit;
5088 }
5089
5090 if (mIsMainCamera == 1) {
5091 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5092 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005093 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005094 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 // related session id should be session id of linked session
5096 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5097 } else {
5098 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5099 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005100 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005101 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005102 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5103 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005104 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005105 pthread_mutex_unlock(&gCamLock);
5106
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005107 rc = mCameraHandle->ops->set_dual_cam_cmd(
5108 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 if (rc < 0) {
5110 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005111 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 goto error_exit;
5113 }
5114 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005115 goto no_error;
5116error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005117 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 return rc;
5119no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 mWokenUpByDaemon = false;
5121 mPendingLiveRequest = 0;
5122 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 }
5124
5125 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005126 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005127
5128 if (mFlushPerf) {
5129 //we cannot accept any requests during flush
5130 LOGE("process_capture_request cannot proceed during flush");
5131 pthread_mutex_unlock(&mMutex);
5132 return NO_ERROR; //should return an error
5133 }
5134
5135 if (meta.exists(ANDROID_REQUEST_ID)) {
5136 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5137 mCurrentRequestId = request_id;
5138 LOGD("Received request with id: %d", request_id);
5139 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5140 LOGE("Unable to find request id field, \
5141 & no previous id available");
5142 pthread_mutex_unlock(&mMutex);
5143 return NAME_NOT_FOUND;
5144 } else {
5145 LOGD("Re-using old request id");
5146 request_id = mCurrentRequestId;
5147 }
5148
5149 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5150 request->num_output_buffers,
5151 request->input_buffer,
5152 frameNumber);
5153 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005154 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005155 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005156 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005157 uint32_t snapshotStreamId = 0;
5158 for (size_t i = 0; i < request->num_output_buffers; i++) {
5159 const camera3_stream_buffer_t& output = request->output_buffers[i];
5160 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5161
Emilian Peev7650c122017-01-19 08:24:33 -08005162 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5163 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005164 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 blob_request = 1;
5166 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5167 }
5168
5169 if (output.acquire_fence != -1) {
5170 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5171 close(output.acquire_fence);
5172 if (rc != OK) {
5173 LOGE("sync wait failed %d", rc);
5174 pthread_mutex_unlock(&mMutex);
5175 return rc;
5176 }
5177 }
5178
Emilian Peev0f3c3162017-03-15 12:57:46 +00005179 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5180 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005181 depthRequestPresent = true;
5182 continue;
5183 }
5184
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005185 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005186 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005187
5188 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5189 isVidBufRequested = true;
5190 }
5191 }
5192
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005193     //FIXME: Add checks to ensure no dups in validateCaptureRequest
5194 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5195 itr++) {
5196 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5197 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5198 channel->getStreamID(channel->getStreamTypeMask());
5199
5200 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5201 isVidBufRequested = true;
5202 }
5203 }
5204
Thierry Strudel3d639192016-09-09 11:52:26 -07005205 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005206 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005207 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 }
5209 if (blob_request && mRawDumpChannel) {
5210 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005211 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005212 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005213 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 }
5215
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005216 {
5217 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5218 // Request a RAW buffer if
5219 // 1. mHdrPlusRawSrcChannel is valid.
5220 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5221 // 3. There is no pending HDR+ request.
5222 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5223 mHdrPlusPendingRequests.size() == 0) {
5224 streamsArray.stream_request[streamsArray.num_streams].streamID =
5225 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5226 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5227 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005228 }
5229
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005230 //extract capture intent
5231 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5232 mCaptureIntent =
5233 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5234 }
5235
5236 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5237 mCacMode =
5238 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5239 }
5240
5241 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005242 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005243
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005244 {
5245 Mutex::Autolock l(gHdrPlusClientLock);
5246 // If this request has a still capture intent, try to submit an HDR+ request.
5247 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5248 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5249 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5250 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005251 }
5252
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005253 if (hdrPlusRequest) {
5254 // For a HDR+ request, just set the frame parameters.
5255 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5256 if (rc < 0) {
5257 LOGE("fail to set frame parameters");
5258 pthread_mutex_unlock(&mMutex);
5259 return rc;
5260 }
5261 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 /* Parse the settings:
5263 * - For every request in NORMAL MODE
5264         * - For every request in HFR mode during the preview-only case
5265         * - For the first request of every batch in HFR mode during video
5266         *   recording. In batch mode the same settings, except for the frame
5267         *   number, are repeated in each request of the batch.
5268 */
5269 if (!mBatchSize ||
5270 (mBatchSize && !isVidBufRequested) ||
5271 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005272 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005273 if (rc < 0) {
5274 LOGE("fail to set frame parameters");
5275 pthread_mutex_unlock(&mMutex);
5276 return rc;
5277 }
5278 }
5279        /* For batch mode HFR, setFrameParameters is not called for every
5280         * request; only the frame number of the latest request is parsed.
5281         * Keep track of the first and last frame numbers in a batch so that
5282         * metadata for the frame numbers of the batch can be duplicated in
5283         * handleBatchMetadata */
5284 if (mBatchSize) {
5285 if (!mToBeQueuedVidBufs) {
5286 //start of the batch
5287 mFirstFrameNumberInBatch = request->frame_number;
5288 }
5289 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5290 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5291 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005292 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 return BAD_VALUE;
5294 }
5295 }
5296 if (mNeedSensorRestart) {
5297            /* Unlock the mutex because restartSensor waits for the channels to be
5298             * stopped, which in turn invokes the stream callback functions
5299             * handleBufferWithLock and handleMetadataWithLock */
5300 pthread_mutex_unlock(&mMutex);
5301 rc = dynamicUpdateMetaStreamInfo();
5302 if (rc != NO_ERROR) {
5303 LOGE("Restarting the sensor failed");
5304 return BAD_VALUE;
5305 }
5306 mNeedSensorRestart = false;
5307 pthread_mutex_lock(&mMutex);
5308 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005309 if(mResetInstantAEC) {
5310 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5311 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5312 mResetInstantAEC = false;
5313 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005314 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 if (request->input_buffer->acquire_fence != -1) {
5316 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5317 close(request->input_buffer->acquire_fence);
5318 if (rc != OK) {
5319 LOGE("input buffer sync wait failed %d", rc);
5320 pthread_mutex_unlock(&mMutex);
5321 return rc;
5322 }
5323 }
5324 }
5325
5326 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5327 mLastCustIntentFrmNum = frameNumber;
5328 }
5329 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005331 pendingRequestIterator latestRequest;
5332 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005333 pendingRequest.num_buffers = depthRequestPresent ?
5334 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005335 pendingRequest.request_id = request_id;
5336 pendingRequest.blob_request = blob_request;
5337 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005338 if (request->input_buffer) {
5339 pendingRequest.input_buffer =
5340 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5341 *(pendingRequest.input_buffer) = *(request->input_buffer);
5342 pInputBuffer = pendingRequest.input_buffer;
5343 } else {
5344 pendingRequest.input_buffer = NULL;
5345 pInputBuffer = NULL;
5346 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005347 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005348
5349 pendingRequest.pipeline_depth = 0;
5350 pendingRequest.partial_result_cnt = 0;
5351 extractJpegMetadata(mCurJpegMeta, request);
5352 pendingRequest.jpegMetadata = mCurJpegMeta;
5353 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005354 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005355 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5356 mHybridAeEnable =
5357 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5358 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005359
5360 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5361 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005362 /* DevCamDebug metadata processCaptureRequest */
5363 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5364 mDevCamDebugMetaEnable =
5365 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5366 }
5367 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5368 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005369
5370 //extract CAC info
5371 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5372 mCacMode =
5373 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5374 }
5375 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005376 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005377
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005378 // extract enableZsl info
5379 if (gExposeEnableZslKey) {
5380 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5381 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5382 mZslEnabled = pendingRequest.enableZsl;
5383 } else {
5384 pendingRequest.enableZsl = mZslEnabled;
5385 }
5386 }
5387
Thierry Strudel3d639192016-09-09 11:52:26 -07005388 PendingBuffersInRequest bufsForCurRequest;
5389 bufsForCurRequest.frame_number = frameNumber;
5390 // Mark current timestamp for the new request
5391 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005392 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005393
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005394 if (hdrPlusRequest) {
5395 // Save settings for this request.
5396 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5397 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5398
5399 // Add to pending HDR+ request queue.
5400 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5401 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5402
5403 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5404 }
5405
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005407 if ((request->output_buffers[i].stream->data_space ==
5408 HAL_DATASPACE_DEPTH) &&
5409 (HAL_PIXEL_FORMAT_BLOB ==
5410 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005411 continue;
5412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005413 RequestedBufferInfo requestedBuf;
5414 memset(&requestedBuf, 0, sizeof(requestedBuf));
5415 requestedBuf.stream = request->output_buffers[i].stream;
5416 requestedBuf.buffer = NULL;
5417 pendingRequest.buffers.push_back(requestedBuf);
5418
5419        // Add the buffer handle to the pending buffers list
5420 PendingBufferInfo bufferInfo;
5421 bufferInfo.buffer = request->output_buffers[i].buffer;
5422 bufferInfo.stream = request->output_buffers[i].stream;
5423 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5424 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5425 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5426 frameNumber, bufferInfo.buffer,
5427 channel->getStreamTypeMask(), bufferInfo.stream->format);
5428 }
5429 // Add this request packet into mPendingBuffersMap
5430 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5431 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5432 mPendingBuffersMap.get_num_overall_buffers());
5433
5434 latestRequest = mPendingRequestsList.insert(
5435 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005436
5437 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5438 // for the frame number.
5439 mShutterDispatcher.expectShutter(frameNumber);
5440 for (size_t i = 0; i < request->num_output_buffers; i++) {
5441 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5442 }
5443
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 if(mFlush) {
5445 LOGI("mFlush is true");
5446 pthread_mutex_unlock(&mMutex);
5447 return NO_ERROR;
5448 }
5449
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005450 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5451 // channel.
5452 if (!hdrPlusRequest) {
5453 int indexUsed;
5454 // Notify metadata channel we receive a request
5455 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005456
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005457 if(request->input_buffer != NULL){
5458 LOGD("Input request, frame_number %d", frameNumber);
5459 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5460 if (NO_ERROR != rc) {
5461 LOGE("fail to set reproc parameters");
5462 pthread_mutex_unlock(&mMutex);
5463 return rc;
5464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005465 }
5466
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005467 // Call request on other streams
5468 uint32_t streams_need_metadata = 0;
5469 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5470 for (size_t i = 0; i < request->num_output_buffers; i++) {
5471 const camera3_stream_buffer_t& output = request->output_buffers[i];
5472 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5473
5474 if (channel == NULL) {
5475 LOGW("invalid channel pointer for stream");
5476 continue;
5477 }
5478
5479 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5480 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5481 output.buffer, request->input_buffer, frameNumber);
5482 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005483 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005484 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5485 if (rc < 0) {
5486 LOGE("Fail to request on picture channel");
5487 pthread_mutex_unlock(&mMutex);
5488 return rc;
5489 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005490 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005491 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5492 assert(NULL != mDepthChannel);
5493 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005494
Emilian Peev7650c122017-01-19 08:24:33 -08005495 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5496 if (rc < 0) {
5497 LOGE("Fail to map on depth buffer");
5498 pthread_mutex_unlock(&mMutex);
5499 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005500 }
Emilian Peev7650c122017-01-19 08:24:33 -08005501 } else {
5502 LOGD("snapshot request with buffer %p, frame_number %d",
5503 output.buffer, frameNumber);
5504 if (!request->settings) {
5505 rc = channel->request(output.buffer, frameNumber,
5506 NULL, mPrevParameters, indexUsed);
5507 } else {
5508 rc = channel->request(output.buffer, frameNumber,
5509 NULL, mParameters, indexUsed);
5510 }
5511 if (rc < 0) {
5512 LOGE("Fail to request on picture channel");
5513 pthread_mutex_unlock(&mMutex);
5514 return rc;
5515 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005516
Emilian Peev7650c122017-01-19 08:24:33 -08005517 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5518 uint32_t j = 0;
5519 for (j = 0; j < streamsArray.num_streams; j++) {
5520 if (streamsArray.stream_request[j].streamID == streamId) {
5521 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5522 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5523 else
5524 streamsArray.stream_request[j].buf_index = indexUsed;
5525 break;
5526 }
5527 }
5528 if (j == streamsArray.num_streams) {
5529 LOGE("Did not find matching stream to update index");
5530 assert(0);
5531 }
5532
5533 pendingBufferIter->need_metadata = true;
5534 streams_need_metadata++;
5535 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005536 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005537 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5538 bool needMetadata = false;
5539 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5540 rc = yuvChannel->request(output.buffer, frameNumber,
5541 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5542 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005543 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005544 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005545 pthread_mutex_unlock(&mMutex);
5546 return rc;
5547 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005548
5549 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5550 uint32_t j = 0;
5551 for (j = 0; j < streamsArray.num_streams; j++) {
5552 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005553 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5554 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5555 else
5556 streamsArray.stream_request[j].buf_index = indexUsed;
5557 break;
5558 }
5559 }
5560 if (j == streamsArray.num_streams) {
5561 LOGE("Did not find matching stream to update index");
5562 assert(0);
5563 }
5564
5565 pendingBufferIter->need_metadata = needMetadata;
5566 if (needMetadata)
5567 streams_need_metadata += 1;
5568 LOGD("calling YUV channel request, need_metadata is %d",
5569 needMetadata);
5570 } else {
5571 LOGD("request with buffer %p, frame_number %d",
5572 output.buffer, frameNumber);
5573
5574 rc = channel->request(output.buffer, frameNumber, indexUsed);
5575
5576 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5577 uint32_t j = 0;
5578 for (j = 0; j < streamsArray.num_streams; j++) {
5579 if (streamsArray.stream_request[j].streamID == streamId) {
5580 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5581 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5582 else
5583 streamsArray.stream_request[j].buf_index = indexUsed;
5584 break;
5585 }
5586 }
5587 if (j == streamsArray.num_streams) {
5588 LOGE("Did not find matching stream to update index");
5589 assert(0);
5590 }
5591
5592 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5593 && mBatchSize) {
5594 mToBeQueuedVidBufs++;
5595 if (mToBeQueuedVidBufs == mBatchSize) {
5596 channel->queueBatchBuf();
5597 }
5598 }
5599 if (rc < 0) {
5600 LOGE("request failed");
5601 pthread_mutex_unlock(&mMutex);
5602 return rc;
5603 }
5604 }
5605 pendingBufferIter++;
5606 }
5607
5608 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5609 itr++) {
5610 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5611
5612 if (channel == NULL) {
5613 LOGE("invalid channel pointer for stream");
5614 assert(0);
5615 return BAD_VALUE;
5616 }
5617
5618 InternalRequest requestedStream;
5619 requestedStream = (*itr);
5620
5621
5622 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5623 LOGD("snapshot request internally input buffer %p, frame_number %d",
5624 request->input_buffer, frameNumber);
5625 if(request->input_buffer != NULL){
5626 rc = channel->request(NULL, frameNumber,
5627 pInputBuffer, &mReprocMeta, indexUsed, true,
5628 requestedStream.meteringOnly);
5629 if (rc < 0) {
5630 LOGE("Fail to request on picture channel");
5631 pthread_mutex_unlock(&mMutex);
5632 return rc;
5633 }
5634 } else {
5635 LOGD("snapshot request with frame_number %d", frameNumber);
5636 if (!request->settings) {
5637 rc = channel->request(NULL, frameNumber,
5638 NULL, mPrevParameters, indexUsed, true,
5639 requestedStream.meteringOnly);
5640 } else {
5641 rc = channel->request(NULL, frameNumber,
5642 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5643 }
5644 if (rc < 0) {
5645 LOGE("Fail to request on picture channel");
5646 pthread_mutex_unlock(&mMutex);
5647 return rc;
5648 }
5649
5650 if ((*itr).meteringOnly != 1) {
5651 requestedStream.need_metadata = 1;
5652 streams_need_metadata++;
5653 }
5654 }
5655
5656 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5657 uint32_t j = 0;
5658 for (j = 0; j < streamsArray.num_streams; j++) {
5659 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005660 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5661 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5662 else
5663 streamsArray.stream_request[j].buf_index = indexUsed;
5664 break;
5665 }
5666 }
5667 if (j == streamsArray.num_streams) {
5668 LOGE("Did not find matching stream to update index");
5669 assert(0);
5670 }
5671
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005672 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005673 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005674 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005675 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005676 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005677 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005678 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005679
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 //If 2 streams have need_metadata set to true, fail the request, unless
5681 //we copy/reference count the metadata buffer
5682 if (streams_need_metadata > 1) {
5683            LOGE("not supporting a request in which two streams require"
5684                    " HAL metadata for reprocessing");
5685 pthread_mutex_unlock(&mMutex);
5686 return -EINVAL;
5687 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005688
Emilian Peev7650c122017-01-19 08:24:33 -08005689 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5690 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5691 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5692 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5693 pthread_mutex_unlock(&mMutex);
5694 return BAD_VALUE;
5695 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005696 if (request->input_buffer == NULL) {
5697 /* Set the parameters to backend:
5698 * - For every request in NORMAL MODE
5699             * - For every request in HFR mode during the preview-only case
5700 * - Once every batch in HFR mode during video recording
5701 */
5702 if (!mBatchSize ||
5703 (mBatchSize && !isVidBufRequested) ||
5704 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5705 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5706 mBatchSize, isVidBufRequested,
5707 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005708
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005709 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5710 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5711 uint32_t m = 0;
5712 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5713 if (streamsArray.stream_request[k].streamID ==
5714 mBatchedStreamsArray.stream_request[m].streamID)
5715 break;
5716 }
5717 if (m == mBatchedStreamsArray.num_streams) {
5718 mBatchedStreamsArray.stream_request\
5719 [mBatchedStreamsArray.num_streams].streamID =
5720 streamsArray.stream_request[k].streamID;
5721 mBatchedStreamsArray.stream_request\
5722 [mBatchedStreamsArray.num_streams].buf_index =
5723 streamsArray.stream_request[k].buf_index;
5724 mBatchedStreamsArray.num_streams =
5725 mBatchedStreamsArray.num_streams + 1;
5726 }
5727 }
5728 streamsArray = mBatchedStreamsArray;
5729 }
5730 /* Update stream id of all the requested buffers */
5731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5732 streamsArray)) {
5733 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005734 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005735 return BAD_VALUE;
5736 }
5737
5738 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5739 mParameters);
5740 if (rc < 0) {
5741 LOGE("set_parms failed");
5742 }
5743                /* reset to zero because the batch is queued */
5744 mToBeQueuedVidBufs = 0;
5745 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5746 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5747 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005748 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5749 uint32_t m = 0;
5750 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5751 if (streamsArray.stream_request[k].streamID ==
5752 mBatchedStreamsArray.stream_request[m].streamID)
5753 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005754 }
5755 if (m == mBatchedStreamsArray.num_streams) {
5756 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5757 streamID = streamsArray.stream_request[k].streamID;
5758 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5759 buf_index = streamsArray.stream_request[k].buf_index;
5760 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5761 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762 }
5763 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005764 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005765
5766 // Start all streams after the first setting is sent, so that the
5767 // setting can be applied sooner: (0 + apply_delay)th frame.
5768 if (mState == CONFIGURED && mChannelHandle) {
5769 //Then start them.
5770 LOGH("Start META Channel");
5771 rc = mMetadataChannel->start();
5772 if (rc < 0) {
5773 LOGE("META channel start failed");
5774 pthread_mutex_unlock(&mMutex);
5775 return rc;
5776 }
5777
5778 if (mAnalysisChannel) {
5779 rc = mAnalysisChannel->start();
5780 if (rc < 0) {
5781 LOGE("Analysis channel start failed");
5782 mMetadataChannel->stop();
5783 pthread_mutex_unlock(&mMutex);
5784 return rc;
5785 }
5786 }
5787
5788 if (mSupportChannel) {
5789 rc = mSupportChannel->start();
5790 if (rc < 0) {
5791 LOGE("Support channel start failed");
5792 mMetadataChannel->stop();
5793                    /* Although support and analysis are mutually exclusive today,
5794                       adding it in any case for future-proofing */
5795 if (mAnalysisChannel) {
5796 mAnalysisChannel->stop();
5797 }
5798 pthread_mutex_unlock(&mMutex);
5799 return rc;
5800 }
5801 }
5802 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5803 it != mStreamInfo.end(); it++) {
5804 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5805 LOGH("Start Processing Channel mask=%d",
5806 channel->getStreamTypeMask());
5807 rc = channel->start();
5808 if (rc < 0) {
5809 LOGE("channel start failed");
5810 pthread_mutex_unlock(&mMutex);
5811 return rc;
5812 }
5813 }
5814
5815 if (mRawDumpChannel) {
5816 LOGD("Starting raw dump stream");
5817 rc = mRawDumpChannel->start();
5818 if (rc != NO_ERROR) {
5819 LOGE("Error Starting Raw Dump Channel");
5820 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5821 it != mStreamInfo.end(); it++) {
5822 QCamera3Channel *channel =
5823 (QCamera3Channel *)(*it)->stream->priv;
5824 LOGH("Stopping Processing Channel mask=%d",
5825 channel->getStreamTypeMask());
5826 channel->stop();
5827 }
5828 if (mSupportChannel)
5829 mSupportChannel->stop();
5830 if (mAnalysisChannel) {
5831 mAnalysisChannel->stop();
5832 }
5833 mMetadataChannel->stop();
5834 pthread_mutex_unlock(&mMutex);
5835 return rc;
5836 }
5837 }
5838
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005839 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005840 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005841 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005842 if (rc != NO_ERROR) {
5843 LOGE("start_channel failed %d", rc);
5844 pthread_mutex_unlock(&mMutex);
5845 return rc;
5846 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005847
5848 {
5849 // Configure Easel for stream on.
5850 Mutex::Autolock l(gHdrPlusClientLock);
5851 if (EaselManagerClientOpened) {
5852 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5853 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5854 if (rc != OK) {
5855 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5856 mCameraId, mSensorModeInfo.op_pixel_clk);
5857 pthread_mutex_unlock(&mMutex);
5858 return rc;
5859 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005860 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005861 }
5862 }
5863
5864 // Start sensor streaming.
5865 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5866 mChannelHandle);
5867 if (rc != NO_ERROR) {
5868 LOGE("start_sensor_stream_on failed %d", rc);
5869 pthread_mutex_unlock(&mMutex);
5870 return rc;
5871 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005872 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005873 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005874 }
5875
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005876 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00005877 {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005878 Mutex::Autolock l(gHdrPlusClientLock);
5879 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5880 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5881 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5882 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5883 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5884 rc = enableHdrPlusModeLocked();
5885 if (rc != OK) {
5886 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5887 pthread_mutex_unlock(&mMutex);
5888 return rc;
5889 }
5890
5891 mFirstPreviewIntentSeen = true;
5892 }
5893 }
5894
Thierry Strudel3d639192016-09-09 11:52:26 -07005895 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5896
5897 mState = STARTED;
5898 // Added a timed condition wait
5899 struct timespec ts;
5900 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005901 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005902 if (rc < 0) {
5903 isValidTimeout = 0;
5904 LOGE("Error reading the real time clock!!");
5905 }
5906 else {
5907        // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005908 int64_t timeout = 5;
5909 {
5910 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5911 // If there is a pending HDR+ request, the following requests may be blocked until the
5912 // HDR+ request is done. So allow a longer timeout.
5913 if (mHdrPlusPendingRequests.size() > 0) {
5914 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5915 }
5916 }
5917 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005918 }
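    // Note: pthread_cond_timedwait() measures the absolute deadline against the clock the
    // condition variable was configured with. Using CLOCK_MONOTONIC above therefore assumes
    // mRequestCond was initialized with a monotonic clock attribute
    // (pthread_condattr_setclock); otherwise the deadline below would effectively be
    // interpreted on CLOCK_REALTIME.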
5919 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005920 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005921 (mState != ERROR) && (mState != DEINIT)) {
5922 if (!isValidTimeout) {
5923 LOGD("Blocking on conditional wait");
5924 pthread_cond_wait(&mRequestCond, &mMutex);
5925 }
5926 else {
5927 LOGD("Blocking on timed conditional wait");
5928 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5929 if (rc == ETIMEDOUT) {
5930 rc = -ENODEV;
5931 LOGE("Unblocked on timeout!!!!");
5932 break;
5933 }
5934 }
5935 LOGD("Unblocked");
5936 if (mWokenUpByDaemon) {
5937 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005938 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005939 break;
5940 }
5941 }
5942 pthread_mutex_unlock(&mMutex);
5943
5944 return rc;
5945}
5946
5947/*===========================================================================
5948 * FUNCTION : dump
5949 *
5950 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and the
5951 *              pending frame drop list) to the given file descriptor
5952 * PARAMETERS :
5953 *   @fd : file descriptor to write the dump to
5954 *
5955 * RETURN     : None
5956 *==========================================================================*/
5957void QCamera3HardwareInterface::dump(int fd)
5958{
5959 pthread_mutex_lock(&mMutex);
5960 dprintf(fd, "\n Camera HAL3 information Begin \n");
5961
5962 dprintf(fd, "\nNumber of pending requests: %zu \n",
5963 mPendingRequestsList.size());
5964 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5965 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5966 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5967 for(pendingRequestIterator i = mPendingRequestsList.begin();
5968 i != mPendingRequestsList.end(); i++) {
5969 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5970 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5971 i->input_buffer);
5972 }
5973 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5974 mPendingBuffersMap.get_num_overall_buffers());
5975 dprintf(fd, "-------+------------------\n");
5976 dprintf(fd, " Frame | Stream type mask \n");
5977 dprintf(fd, "-------+------------------\n");
5978 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5979 for(auto &j : req.mPendingBufferList) {
5980 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5981 dprintf(fd, " %5d | %11d \n",
5982 req.frame_number, channel->getStreamTypeMask());
5983 }
5984 }
5985 dprintf(fd, "-------+------------------\n");
5986
5987 dprintf(fd, "\nPending frame drop list: %zu\n",
5988 mPendingFrameDropList.size());
5989 dprintf(fd, "-------+-----------\n");
5990 dprintf(fd, " Frame | Stream ID \n");
5991 dprintf(fd, "-------+-----------\n");
5992 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5993 i != mPendingFrameDropList.end(); i++) {
5994 dprintf(fd, " %5d | %9d \n",
5995 i->frame_number, i->stream_ID);
5996 }
5997 dprintf(fd, "-------+-----------\n");
5998
5999 dprintf(fd, "\n Camera HAL3 information End \n");
6000
6001 /* use dumpsys media.camera as trigger to send update debug level event */
6002 mUpdateDebugLevel = true;
6003 pthread_mutex_unlock(&mMutex);
6004 return;
6005}
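// Usage note: dump() backs the HAL device's dump entry point; in practice it is typically
// exercised via `adb shell dumpsys media.camera`, which is also why it sets
// mUpdateDebugLevel so that a debug-level update is sent with the next request.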
6006
6007/*===========================================================================
6008 * FUNCTION : flush
6009 *
6010 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6011 * conditionally restarts channels
6012 *
6013 * PARAMETERS :
6014 * @ restartChannels: re-start all channels
6015 *
6016 *
6017 * RETURN :
6018 * 0 on success
6019 * Error code on failure
6020 *==========================================================================*/
6021int QCamera3HardwareInterface::flush(bool restartChannels)
6022{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006023 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006024 int32_t rc = NO_ERROR;
6025
6026 LOGD("Unblocking Process Capture Request");
6027 pthread_mutex_lock(&mMutex);
6028 mFlush = true;
6029 pthread_mutex_unlock(&mMutex);
6030
6031 rc = stopAllChannels();
6032    // Unlink dual camera if the device is linked
6033 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006034 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6035 &m_pDualCamCmdPtr->bundle_info;
6036 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006037 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6038 pthread_mutex_lock(&gCamLock);
6039
6040 if (mIsMainCamera == 1) {
6041 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6042 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006043 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006044 // related session id should be session id of linked session
6045 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6046 } else {
6047 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6048 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006049 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006050 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6051 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006052 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006053 pthread_mutex_unlock(&gCamLock);
6054
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006055 rc = mCameraHandle->ops->set_dual_cam_cmd(
6056 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006057 if (rc < 0) {
6058 LOGE("Dualcam: Unlink failed, but still proceed to close");
6059 }
6060 }
6061
6062 if (rc < 0) {
6063 LOGE("stopAllChannels failed");
6064 return rc;
6065 }
6066 if (mChannelHandle) {
6067 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6068 mChannelHandle);
6069 }
6070
6071 // Reset bundle info
6072 rc = setBundleInfo();
6073 if (rc < 0) {
6074 LOGE("setBundleInfo failed %d", rc);
6075 return rc;
6076 }
6077
6078 // Mutex Lock
6079 pthread_mutex_lock(&mMutex);
6080
6081 // Unblock process_capture_request
6082 mPendingLiveRequest = 0;
6083 pthread_cond_signal(&mRequestCond);
6084
6085 rc = notifyErrorForPendingRequests();
6086 if (rc < 0) {
6087 LOGE("notifyErrorForPendingRequests failed");
6088 pthread_mutex_unlock(&mMutex);
6089 return rc;
6090 }
6091
6092 mFlush = false;
6093
6094 // Start the Streams/Channels
6095 if (restartChannels) {
6096 rc = startAllChannels();
6097 if (rc < 0) {
6098 LOGE("startAllChannels failed");
6099 pthread_mutex_unlock(&mMutex);
6100 return rc;
6101 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006102 if (mChannelHandle) {
6103 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006104 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006105 if (rc < 0) {
6106 LOGE("start_channel failed");
6107 pthread_mutex_unlock(&mMutex);
6108 return rc;
6109 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006110 }
6111 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006112 pthread_mutex_unlock(&mMutex);
6113
6114 return 0;
6115}
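// Usage note: flush() backs the camera3 flush() entry point. The framework may call it while
// requests are still in flight and expects every outstanding request to be returned (with
// error notifications where needed) before the call completes, which is why
// notifyErrorForPendingRequests() runs before the channels are optionally restarted.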
6116
6117/*===========================================================================
6118 * FUNCTION : flushPerf
6119 *
6120 * DESCRIPTION: Performance-optimized version of flush that does not stream
6121 *              off; instead it flushes the backend and waits for pending buffers to return
6122 *
6123 * PARAMETERS :
6124 *
6125 *
6126 * RETURN : 0 : success
6127 * -EINVAL: input is malformed (device is not valid)
6128 * -ENODEV: if the device has encountered a serious error
6129 *==========================================================================*/
6130int QCamera3HardwareInterface::flushPerf()
6131{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006132 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006133 int32_t rc = 0;
6134 struct timespec timeout;
6135 bool timed_wait = false;
6136
6137 pthread_mutex_lock(&mMutex);
6138 mFlushPerf = true;
6139 mPendingBuffersMap.numPendingBufsAtFlush =
6140 mPendingBuffersMap.get_num_overall_buffers();
6141 LOGD("Calling flush. Wait for %d buffers to return",
6142 mPendingBuffersMap.numPendingBufsAtFlush);
6143
6144 /* send the flush event to the backend */
6145 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6146 if (rc < 0) {
6147 LOGE("Error in flush: IOCTL failure");
6148 mFlushPerf = false;
6149 pthread_mutex_unlock(&mMutex);
6150 return -ENODEV;
6151 }
6152
6153 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6154 LOGD("No pending buffers in HAL, return flush");
6155 mFlushPerf = false;
6156 pthread_mutex_unlock(&mMutex);
6157 return rc;
6158 }
6159
6160 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006161 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006162 if (rc < 0) {
6163 LOGE("Error reading the real time clock, cannot use timed wait");
6164 } else {
6165 timeout.tv_sec += FLUSH_TIMEOUT;
6166 timed_wait = true;
6167 }
6168
6169 //Block on conditional variable
6170 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6171 LOGD("Waiting on mBuffersCond");
6172 if (!timed_wait) {
6173 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6174 if (rc != 0) {
6175 LOGE("pthread_cond_wait failed due to rc = %s",
6176 strerror(rc));
6177 break;
6178 }
6179 } else {
6180 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6181 if (rc != 0) {
6182 LOGE("pthread_cond_timedwait failed due to rc = %s",
6183 strerror(rc));
6184 break;
6185 }
6186 }
6187 }
6188 if (rc != 0) {
6189 mFlushPerf = false;
6190 pthread_mutex_unlock(&mMutex);
6191 return -ENODEV;
6192 }
6193
6194 LOGD("Received buffers, now safe to return them");
6195
6196 //make sure the channels handle flush
6197 //currently only required for the picture channel to release snapshot resources
6198 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6199 it != mStreamInfo.end(); it++) {
6200 QCamera3Channel *channel = (*it)->channel;
6201 if (channel) {
6202 rc = channel->flush();
6203 if (rc) {
6204 LOGE("Flushing the channels failed with error %d", rc);
6205                // Even though the channel flush failed, we need to continue and
6206                // return the buffers we have to the framework; however, the return
6207                // value will be an error
6208 rc = -ENODEV;
6209 }
6210 }
6211 }
6212
6213 /* notify the frameworks and send errored results */
6214 rc = notifyErrorForPendingRequests();
6215 if (rc < 0) {
6216 LOGE("notifyErrorForPendingRequests failed");
6217 pthread_mutex_unlock(&mMutex);
6218 return rc;
6219 }
6220
6221 //unblock process_capture_request
6222 mPendingLiveRequest = 0;
6223 unblockRequestIfNecessary();
6224
6225 mFlushPerf = false;
6226 pthread_mutex_unlock(&mMutex);
6227 LOGD ("Flush Operation complete. rc = %d", rc);
6228 return rc;
6229}
6230
6231/*===========================================================================
6232 * FUNCTION : handleCameraDeviceError
6233 *
6234 * DESCRIPTION: This function calls internal flush and notifies the error to
6235 * framework and updates the state variable.
6236 *
6237 * PARAMETERS : None
6238 *
6239 * RETURN : NO_ERROR on Success
6240 * Error code on failure
6241 *==========================================================================*/
6242int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6243{
6244 int32_t rc = NO_ERROR;
6245
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006246 {
6247 Mutex::Autolock lock(mFlushLock);
6248 pthread_mutex_lock(&mMutex);
6249 if (mState != ERROR) {
6250 //if mState != ERROR, nothing to be done
6251 pthread_mutex_unlock(&mMutex);
6252 return NO_ERROR;
6253 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006254 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006255
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006256 rc = flush(false /* restart channels */);
6257 if (NO_ERROR != rc) {
6258 LOGE("internal flush to handle mState = ERROR failed");
6259 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006260
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006261 pthread_mutex_lock(&mMutex);
6262 mState = DEINIT;
6263 pthread_mutex_unlock(&mMutex);
6264 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006265
6266 camera3_notify_msg_t notify_msg;
6267 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6268 notify_msg.type = CAMERA3_MSG_ERROR;
6269 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6270 notify_msg.message.error.error_stream = NULL;
6271 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006272 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006273
6274 return rc;
6275}
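// Note: CAMERA3_MSG_ERROR_DEVICE is a fatal notification; the framework is expected to stop
// submitting requests and close the device after receiving it, which is why mState is moved
// to DEINIT before the error is dispatched above.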
6276
6277/*===========================================================================
6278 * FUNCTION : captureResultCb
6279 *
6280 * DESCRIPTION: Callback handler for all capture result
6281 * (streams, as well as metadata)
6282 *
6283 * PARAMETERS :
6284 *   @metadata_buf : metadata buffer from the backend; NULL if this is a buffer callback
6285 *   @buffer : actual gralloc buffer to be returned to the framework; NULL if metadata
6286 *   @frame_number : frame number of the request; @isInputBuffer : true for the input buffer
6287 *
6288 * RETURN : NONE
6289 *==========================================================================*/
6290void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6291 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6292{
6293 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006294 pthread_mutex_lock(&mMutex);
6295 uint8_t batchSize = mBatchSize;
6296 pthread_mutex_unlock(&mMutex);
6297 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006298 handleBatchMetadata(metadata_buf,
6299 true /* free_and_bufdone_meta_buf */);
6300 } else { /* mBatchSize = 0 */
6301 hdrPlusPerfLock(metadata_buf);
6302 pthread_mutex_lock(&mMutex);
6303 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006304 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006305 true /* last urgent frame of batch metadata */,
6306 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006307 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006308 pthread_mutex_unlock(&mMutex);
6309 }
6310 } else if (isInputBuffer) {
6311 pthread_mutex_lock(&mMutex);
6312 handleInputBufferWithLock(frame_number);
6313 pthread_mutex_unlock(&mMutex);
6314 } else {
6315 pthread_mutex_lock(&mMutex);
6316 handleBufferWithLock(buffer, frame_number);
6317 pthread_mutex_unlock(&mMutex);
6318 }
6319 return;
6320}
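// Note: captureResultCb() is a callback that arrives from the backend/channel threads rather
// than from the framework's request path, so each handler above is bracketed by mMutex
// instead of assuming the caller already holds it.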
6321
6322/*===========================================================================
6323 * FUNCTION : getReprocessibleOutputStreamId
6324 *
6325 * DESCRIPTION: Get source output stream id for the input reprocess stream
6326 * based on size and format, which would be the largest
6327 * output stream if an input stream exists.
6328 *
6329 * PARAMETERS :
6330 * @id : return the stream id if found
6331 *
6332 * RETURN : int32_t type of status
6333 * NO_ERROR -- success
6334 * none-zero failure code
6335 *==========================================================================*/
6336int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6337{
6338    /* Check if there is any output or bidirectional stream with the same size
6339       and format, and return that stream */
6340 if ((mInputStreamInfo.dim.width > 0) &&
6341 (mInputStreamInfo.dim.height > 0)) {
6342 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6343 it != mStreamInfo.end(); it++) {
6344
6345 camera3_stream_t *stream = (*it)->stream;
6346 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6347 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6348 (stream->format == mInputStreamInfo.format)) {
6349 // Usage flag for an input stream and the source output stream
6350 // may be different.
6351 LOGD("Found reprocessible output stream! %p", *it);
6352 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6353 stream->usage, mInputStreamInfo.usage);
6354
6355 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6356 if (channel != NULL && channel->mStreams[0]) {
6357 id = channel->mStreams[0]->getMyServerID();
6358 return NO_ERROR;
6359 }
6360 }
6361 }
6362 } else {
6363 LOGD("No input stream, so no reprocessible output stream");
6364 }
6365 return NAME_NOT_FOUND;
6366}
6367
6368/*===========================================================================
6369 * FUNCTION : lookupFwkName
6370 *
6371 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6372 *              make sure the parameter is correctly propagated
6373 *
6374 * PARAMETERS :
6375 * @arr : map between the two enums
6376 * @len : len of the map
6377 * @hal_name : name of the hal_parm to map
6378 *
6379 * RETURN : int type of status
6380 * fwk_name -- success
6381 *              NAME_NOT_FOUND -- failure
6382 *==========================================================================*/
6383template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6384 size_t len, halType hal_name)
6385{
6386
6387 for (size_t i = 0; i < len; i++) {
6388 if (arr[i].hal_name == hal_name) {
6389 return arr[i].fwk_name;
6390 }
6391 }
6392
6393    /* Failing to find a matching framework type is not necessarily
6394     * an error case. This happens when mm-camera supports more attributes
6395     * than the framework does */
6396 LOGH("Cannot find matching framework type");
6397 return NAME_NOT_FOUND;
6398}
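// Illustrative usage sketch for lookupFwkName(). The map table, size macro and backend enum
// below (EFFECT_MODES_MAP, METADATA_MAP_SIZE, CAM_EFFECT_MODE_SEPIA) are assumed to be the
// ones defined elsewhere in this HAL and are shown only as an example:
//
//     int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
//             CAM_EFFECT_MODE_SEPIA);
//     if (val != NAME_NOT_FOUND) {
//         uint8_t fwkEffectMode = (uint8_t)val;
//         camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwkEffectMode, 1);
//     }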
6399
6400/*===========================================================================
6401 * FUNCTION : lookupHalName
6402 *
6403 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6404 *              make sure the parameter is correctly propagated
6405 *
6406 * PARAMETERS :
6407 * @arr : map between the two enums
6408 * @len : len of the map
6409 *   @fwk_name : framework enum value to map
6410 *
6411 * RETURN : int32_t type of status
6412 * hal_name -- success
6413 *              NAME_NOT_FOUND -- failure
6414 *==========================================================================*/
6415template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6416 size_t len, fwkType fwk_name)
6417{
6418 for (size_t i = 0; i < len; i++) {
6419 if (arr[i].fwk_name == fwk_name) {
6420 return arr[i].hal_name;
6421 }
6422 }
6423
6424 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6425 return NAME_NOT_FOUND;
6426}
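// Illustrative usage sketch for lookupHalName(), the reverse mapping (same caveat as above:
// the table and enum names are assumed, not defined here):
//
//     int halEffect = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
//             ANDROID_CONTROL_EFFECT_MODE_SEPIA);
//     if (halEffect == NAME_NOT_FOUND) {
//         // The framework requested a mode the backend does not support; reject the
//         // setting or fall back to a default instead of passing the value through.
//     }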
6427
6428/*===========================================================================
6429 * FUNCTION : lookupProp
6430 *
6431 * DESCRIPTION: lookup a value by its name
6432 *
6433 * PARAMETERS :
6434 * @arr : map between the two enums
6435 * @len : size of the map
6436 * @name : name to be looked up
6437 *
6438 * RETURN : Value if found
6439 * CAM_CDS_MODE_MAX if not found
6440 *==========================================================================*/
6441template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6442 size_t len, const char *name)
6443{
6444 if (name) {
6445 for (size_t i = 0; i < len; i++) {
6446 if (!strcmp(arr[i].desc, name)) {
6447 return arr[i].val;
6448 }
6449 }
6450 }
6451 return CAM_CDS_MODE_MAX;
6452}
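// Illustrative usage sketch for lookupProp(), assuming a {desc, val} table such as a CDS map
// defined elsewhere in this HAL and a hypothetical system property key:
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (cds_mode == CAM_CDS_MODE_MAX) {
//         // Unrecognized name; keep the current/default CDS mode.
//     }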
6453
6454/*===========================================================================
6455 * FUNCTION   : translateFromHalMetadata
6456 * DESCRIPTION: Translate the metadata returned by the backend into the camera_metadata_t format expected by the framework
6457 *
6458 * PARAMETERS :
6459 * @metadata : metadata information from callback
6460 * @timestamp: metadata buffer timestamp
6461 * @request_id: request id
6462 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006463 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006464 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6465 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006466 *   @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006467 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6468 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006469 *
6470 * RETURN : camera_metadata_t*
6471 * metadata in a format specified by fwk
6472 *==========================================================================*/
6473camera_metadata_t*
6474QCamera3HardwareInterface::translateFromHalMetadata(
6475 metadata_buffer_t *metadata,
6476 nsecs_t timestamp,
6477 int32_t request_id,
6478 const CameraMetadata& jpegMetadata,
6479 uint8_t pipeline_depth,
6480 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006481 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006482 /* DevCamDebug metadata translateFromHalMetadata argument */
6483 uint8_t DevCamDebug_meta_enable,
6484 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006485 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006486 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006487 bool lastMetadataInBatch,
6488 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006489{
6490 CameraMetadata camMetadata;
6491 camera_metadata_t *resultMetadata;
6492
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006493 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006494 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6495 * Timestamp is needed because it's used for shutter notify calculation.
6496 * */
6497 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6498 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006499 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006500 }
6501
Thierry Strudel3d639192016-09-09 11:52:26 -07006502 if (jpegMetadata.entryCount())
6503 camMetadata.append(jpegMetadata);
6504
6505 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6506 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6507 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6508 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006509 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006510 if (mBatchSize == 0) {
6511 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6512 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6513 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006514
Samuel Ha68ba5172016-12-15 18:41:12 -08006515 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6516    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6517 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6518 // DevCamDebug metadata translateFromHalMetadata AF
6519 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6520 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6521 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6522 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6523 }
6524 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6525 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6526 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6527 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6528 }
6529 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6530 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6531 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6532 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6533 }
6534 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6535 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6536 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6537 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6538 }
6539 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6540 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6541 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6542 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6543 }
6544 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6545 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6546 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6547 *DevCamDebug_af_monitor_pdaf_target_pos;
6548 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6549 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6550 }
6551 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6552 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6553 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6554 *DevCamDebug_af_monitor_pdaf_confidence;
6555 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6556 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6557 }
6558 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6559 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6560 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6561 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6562 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6563 }
6564 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6565 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6566 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6567 *DevCamDebug_af_monitor_tof_target_pos;
6568 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6569 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6570 }
6571 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6572 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6573 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6574 *DevCamDebug_af_monitor_tof_confidence;
6575 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6576 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6577 }
6578 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6579 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6580 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6581 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6582 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6583 }
6584 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6585 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6586 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6587 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6588 &fwk_DevCamDebug_af_monitor_type_select, 1);
6589 }
6590 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6591 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6592 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6593 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6594 &fwk_DevCamDebug_af_monitor_refocus, 1);
6595 }
6596 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6597 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6598 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6599 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6600 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6601 }
6602 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6603 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6604 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6605 *DevCamDebug_af_search_pdaf_target_pos;
6606 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6607 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6608 }
6609 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6610 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6611 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6612 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6613 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6614 }
6615 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6616 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6617 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6618 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6619 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6622 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6623 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6624 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6625 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6626 }
6627 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6628 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6629 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6630 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6631 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6634 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6635 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6636 *DevCamDebug_af_search_tof_target_pos;
6637 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6638 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6639 }
6640 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6641 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6642 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6643 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6644 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6647 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6648 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6649 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6650 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6653 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6654 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6655 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6656 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6657 }
6658 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6659 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6660 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6661 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6662 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6665 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6666 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6667 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6668 &fwk_DevCamDebug_af_search_type_select, 1);
6669 }
6670 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6671 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6672 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6673 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6674 &fwk_DevCamDebug_af_search_next_pos, 1);
6675 }
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6677 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6678 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6679 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6680 &fwk_DevCamDebug_af_search_target_pos, 1);
6681 }
6682 // DevCamDebug metadata translateFromHalMetadata AEC
6683 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6684 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6685 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6686 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6687 }
6688 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6689 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6690 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6691 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6692 }
6693 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6694 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6695 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6696 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6697 }
6698 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6699 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6700 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6701 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6702 }
6703 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6704 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6705 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6706 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6707 }
6708 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6709 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6710 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6711 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6714 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6715 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6716 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6717 }
6718 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6719 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6720 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6721 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6722 }
Samuel Ha34229982017-02-17 13:51:11 -08006723 // DevCamDebug metadata translateFromHalMetadata zzHDR
6724 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6725 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6726 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6727 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6728 }
6729 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6730 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006731 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006732 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6733 }
6734 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6735 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6736 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6737 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6740 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006741 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006742 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6743 }
6744 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6745 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6746 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6747 *DevCamDebug_aec_hdr_sensitivity_ratio;
6748 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6749 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6750 }
6751 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6752 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6753 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6754 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6755 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6756 }
6757 // DevCamDebug metadata translateFromHalMetadata ADRC
6758 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6759 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6760 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6761 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6762 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6763 }
6764 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6765 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6766 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6767 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6768 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6769 }
6770 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6771 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6772 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6773 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6774 }
6775 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6776 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6777 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6778 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6779 }
6780 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6781 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6782 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6783 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6784 }
6785 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6786 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6787 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6788 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6789 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006790 // DevCamDebug metadata translateFromHalMetadata AWB
6791 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6792 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6793 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6794 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6795 }
6796 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6797 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6798 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6799 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6800 }
6801 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6802 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6803 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6804 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6805 }
6806 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6807 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6808 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6809 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6810 }
6811 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6812 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6813 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6814 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6815 }
6816 }
6817 // atrace_end(ATRACE_TAG_ALWAYS);
6818
Thierry Strudel3d639192016-09-09 11:52:26 -07006819 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6820 int64_t fwk_frame_number = *frame_number;
6821 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6822 }
6823
6824 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6825 int32_t fps_range[2];
6826 fps_range[0] = (int32_t)float_range->min_fps;
6827 fps_range[1] = (int32_t)float_range->max_fps;
6828 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6829 fps_range, 2);
6830 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6831 fps_range[0], fps_range[1]);
6832 }
6833
6834 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6835 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6836 }
6837
6838 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6839        int val = lookupFwkName(SCENE_MODES_MAP,
6840 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6841 *sceneMode);
6842 if (NAME_NOT_FOUND != val) {
6843 uint8_t fwkSceneMode = (uint8_t)val;
6844 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6845 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6846 fwkSceneMode);
6847 }
6848 }
6849
6850 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6851 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6852 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6853 }
6854
6855 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6856 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6857 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6858 }
6859
6860 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6861 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6862 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6863 }
6864
6865 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6866 CAM_INTF_META_EDGE_MODE, metadata) {
6867 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6868 }
6869
6870 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6871 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6872 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6873 }
6874
6875 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6876 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6877 }
6878
6879 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6880 if (0 <= *flashState) {
6881 uint8_t fwk_flashState = (uint8_t) *flashState;
6882 if (!gCamCapability[mCameraId]->flash_available) {
6883 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6884 }
6885 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6886 }
6887 }
6888
6889 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6890 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6891 if (NAME_NOT_FOUND != val) {
6892 uint8_t fwk_flashMode = (uint8_t)val;
6893 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6894 }
6895 }
6896
6897 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6898 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6899 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6900 }
6901
6902 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6903 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6904 }
6905
6906 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6907 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6908 }
6909
6910 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6911 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6912 }
6913
6914 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6915 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6916 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6917 }
6918
6919 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6920 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6921 LOGD("fwk_videoStab = %d", fwk_videoStab);
6922 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6923 } else {
6924        // Regardless of whether video stabilization is supported, CTS expects the EIS result
6925        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6926 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6927 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006928 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006929 }
6930
6931 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6932 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6933 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6934 }
6935
6936 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6937 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6938 }
6939
Thierry Strudel3d639192016-09-09 11:52:26 -07006940 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6941 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006942 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006943
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006944 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6945 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006946
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006947 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006948 blackLevelAppliedPattern->cam_black_level[0],
6949 blackLevelAppliedPattern->cam_black_level[1],
6950 blackLevelAppliedPattern->cam_black_level[2],
6951 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006952 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6953 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006954
6955#ifndef USE_HAL_3_3
6956 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306957        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006958 // depth space.
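        // For example, a 14-bit dynamic black level of 1024 maps to 1024 / 16 = 64 in the 10-bit range.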
Jason Lee4f3d96e2017-02-28 19:24:14 +05306959 fwk_blackLevelInd[0] /= 16.0;
6960 fwk_blackLevelInd[1] /= 16.0;
6961 fwk_blackLevelInd[2] /= 16.0;
6962 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006963 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6964 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006965#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006966 }
6967
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006968#ifndef USE_HAL_3_3
6969    // A fixed white level is used by the ISP/sensor
6970 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6971 &gCamCapability[mCameraId]->white_level, 1);
6972#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006973
6974 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6975 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6976 int32_t scalerCropRegion[4];
6977 scalerCropRegion[0] = hScalerCropRegion->left;
6978 scalerCropRegion[1] = hScalerCropRegion->top;
6979 scalerCropRegion[2] = hScalerCropRegion->width;
6980 scalerCropRegion[3] = hScalerCropRegion->height;
6981
6982 // Adjust crop region from sensor output coordinate system to active
6983 // array coordinate system.
6984 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6985 scalerCropRegion[2], scalerCropRegion[3]);
6986
6987 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6988 }
6989
6990 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6991 LOGD("sensorExpTime = %lld", *sensorExpTime);
6992 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6993 }
6994
6995    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6996            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6997        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6998        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6999 }
7000
7001 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7002 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7003 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7004 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7005 sensorRollingShutterSkew, 1);
7006 }
7007
7008 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7009 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7010 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7011
7012 //calculate the noise profile based on sensitivity
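        // Per the ANDROID_SENSOR_NOISE_PROFILE definition, the (S, O) pair approximates the noise
        // variance of a pixel value x as S * x + O; the same pair is reported for every color channel.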
7013 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7014 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7015 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7016 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7017 noise_profile[i] = noise_profile_S;
7018 noise_profile[i+1] = noise_profile_O;
7019 }
7020 LOGD("noise model entry (S, O) is (%f, %f)",
7021 noise_profile_S, noise_profile_O);
7022 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7023 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7024 }
7025
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007026#ifndef USE_HAL_3_3
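    // Post-RAW sensitivity boost is reported in units where 100 means no boost; the ISP
    // sensitivity is scaled by the post-stats factor when both pieces of metadata are present.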
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007027 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007028 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007029 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007030 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007031 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7032 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7033 }
7034 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007035#endif
7036
Thierry Strudel3d639192016-09-09 11:52:26 -07007037 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7038 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7039 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7040 }
7041
7042 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7043 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7044 *faceDetectMode);
7045 if (NAME_NOT_FOUND != val) {
7046 uint8_t fwk_faceDetectMode = (uint8_t)val;
7047 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7048
7049 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7050 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7051 CAM_INTF_META_FACE_DETECTION, metadata) {
7052 uint8_t numFaces = MIN(
7053 faceDetectionInfo->num_faces_detected, MAX_ROI);
7054 int32_t faceIds[MAX_ROI];
7055 uint8_t faceScores[MAX_ROI];
7056 int32_t faceRectangles[MAX_ROI * 4];
7057 int32_t faceLandmarks[MAX_ROI * 6];
7058 size_t j = 0, k = 0;
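                    // j indexes faceRectangles (4 ints per face); k indexes faceLandmarks
                    // (TOTAL_LANDMARK_INDICES ints per face, matching the 6-entry framework layout).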
7059
7060 for (size_t i = 0; i < numFaces; i++) {
7061 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7062 // Adjust crop region from sensor output coordinate system to active
7063 // array coordinate system.
7064 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7065 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7066 rect.width, rect.height);
7067
7068 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7069 faceRectangles+j, -1);
7070
Jason Lee8ce36fa2017-04-19 19:40:37 -07007071 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7072 "bottom-right (%d, %d)",
7073 faceDetectionInfo->frame_id, i,
7074 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7075 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7076
Thierry Strudel3d639192016-09-09 11:52:26 -07007077 j+= 4;
7078 }
7079 if (numFaces <= 0) {
7080 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7081 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7082 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7083 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7084 }
7085
7086 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7087 numFaces);
7088 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7089 faceRectangles, numFaces * 4U);
7090 if (fwk_faceDetectMode ==
7091 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7092 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7093 CAM_INTF_META_FACE_LANDMARK, metadata) {
7094
7095 for (size_t i = 0; i < numFaces; i++) {
7096 // Map the co-ordinate sensor output coordinate system to active
7097 // array coordinate system.
7098 mCropRegionMapper.toActiveArray(
7099 landmarks->face_landmarks[i].left_eye_center.x,
7100 landmarks->face_landmarks[i].left_eye_center.y);
7101 mCropRegionMapper.toActiveArray(
7102 landmarks->face_landmarks[i].right_eye_center.x,
7103 landmarks->face_landmarks[i].right_eye_center.y);
7104 mCropRegionMapper.toActiveArray(
7105 landmarks->face_landmarks[i].mouth_center.x,
7106 landmarks->face_landmarks[i].mouth_center.y);
7107
7108 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007109
7110 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7111 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7112 faceDetectionInfo->frame_id, i,
7113 faceLandmarks[k + LEFT_EYE_X],
7114 faceLandmarks[k + LEFT_EYE_Y],
7115 faceLandmarks[k + RIGHT_EYE_X],
7116 faceLandmarks[k + RIGHT_EYE_Y],
7117 faceLandmarks[k + MOUTH_X],
7118 faceLandmarks[k + MOUTH_Y]);
7119
Thierry Strudel04e026f2016-10-10 11:27:36 -07007120 k+= TOTAL_LANDMARK_INDICES;
7121 }
7122 } else {
7123 for (size_t i = 0; i < numFaces; i++) {
7124 setInvalidLandmarks(faceLandmarks+k);
7125 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007126 }
7127 }
7128
Jason Lee49619db2017-04-13 12:07:22 -07007129 for (size_t i = 0; i < numFaces; i++) {
7130 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7131
7132 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7133 faceDetectionInfo->frame_id, i, faceIds[i]);
7134 }
7135
Thierry Strudel3d639192016-09-09 11:52:26 -07007136 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7137 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7138 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007139 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007140 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7141 CAM_INTF_META_FACE_BLINK, metadata) {
7142 uint8_t detected[MAX_ROI];
7143 uint8_t degree[MAX_ROI * 2];
7144 for (size_t i = 0; i < numFaces; i++) {
7145 detected[i] = blinks->blink[i].blink_detected;
7146 degree[2 * i] = blinks->blink[i].left_blink;
7147 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007148
Jason Lee49619db2017-04-13 12:07:22 -07007149 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7150 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7151 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7152 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007153 }
7154 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7155 detected, numFaces);
7156 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7157 degree, numFaces * 2);
7158 }
7159 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7160 CAM_INTF_META_FACE_SMILE, metadata) {
7161 uint8_t degree[MAX_ROI];
7162 uint8_t confidence[MAX_ROI];
7163 for (size_t i = 0; i < numFaces; i++) {
7164 degree[i] = smiles->smile[i].smile_degree;
7165 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007166
Jason Lee49619db2017-04-13 12:07:22 -07007167 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7168 "smile_degree=%d, smile_score=%d",
7169 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007170 }
7171 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7172 degree, numFaces);
7173 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7174 confidence, numFaces);
7175 }
7176 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7177 CAM_INTF_META_FACE_GAZE, metadata) {
7178 int8_t angle[MAX_ROI];
7179 int32_t direction[MAX_ROI * 3];
7180 int8_t degree[MAX_ROI * 2];
7181 for (size_t i = 0; i < numFaces; i++) {
7182 angle[i] = gazes->gaze[i].gaze_angle;
7183 direction[3 * i] = gazes->gaze[i].updown_dir;
7184 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7185 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7186 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7187 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007188
7189 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7190 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7191 "left_right_gaze=%d, top_bottom_gaze=%d",
7192 faceDetectionInfo->frame_id, i, angle[i],
7193 direction[3 * i], direction[3 * i + 1],
7194 direction[3 * i + 2],
7195 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007196 }
7197 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7198 (uint8_t *)angle, numFaces);
7199 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7200 direction, numFaces * 3);
7201 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7202 (uint8_t *)degree, numFaces * 2);
7203 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007204 }
7205 }
7206 }
7207 }
7208
7209 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7210 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007211 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007212 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007213 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007214
Shuzhen Wang14415f52016-11-16 18:26:18 -08007215 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7216 histogramBins = *histBins;
7217 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7218 }
7219
7220 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007221 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7222 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007223 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007224
7225 switch (stats_data->type) {
7226 case CAM_HISTOGRAM_TYPE_BAYER:
7227 switch (stats_data->bayer_stats.data_type) {
7228 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007229 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7230 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007231 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007232 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7233 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007234 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007235 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7236 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007237 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007238 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007239 case CAM_STATS_CHANNEL_R:
7240 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007241 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7242 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007243 }
7244 break;
7245 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007246 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007247 break;
7248 }
7249
Shuzhen Wang14415f52016-11-16 18:26:18 -08007250 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007251 }
7252 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007253 }
7254
7255 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7256 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7257 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7258 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7259 }
7260
7261 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7262 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7263 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7264 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7265 }
7266
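    // The shading map packs four gain samples (one per Bayer channel) for each grid cell,
    // hence the 4U multiplier below.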
7267 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7268 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7269 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7270 CAM_MAX_SHADING_MAP_HEIGHT);
7271 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7272 CAM_MAX_SHADING_MAP_WIDTH);
7273 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7274 lensShadingMap->lens_shading, 4U * map_width * map_height);
7275 }
7276
7277 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7278 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7279 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7280 }
7281
7282 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7283 //Populate CAM_INTF_META_TONEMAP_CURVES
7284        /* ch0 = G, ch1 = B, ch2 = R */
7285 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7286 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7287 tonemap->tonemap_points_cnt,
7288 CAM_MAX_TONEMAP_CURVE_SIZE);
7289 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7290 }
7291
7292 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7293 &tonemap->curves[0].tonemap_points[0][0],
7294 tonemap->tonemap_points_cnt * 2);
7295
7296 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7297 &tonemap->curves[1].tonemap_points[0][0],
7298 tonemap->tonemap_points_cnt * 2);
7299
7300 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7301 &tonemap->curves[2].tonemap_points[0][0],
7302 tonemap->tonemap_points_cnt * 2);
7303 }
7304
7305 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7306 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7307 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7308 CC_GAIN_MAX);
7309 }
7310
7311 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7312 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7313 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7314 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7315 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7316 }
7317
7318 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7319 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7320 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7321 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7322 toneCurve->tonemap_points_cnt,
7323 CAM_MAX_TONEMAP_CURVE_SIZE);
7324 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7325 }
7326 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7327 (float*)toneCurve->curve.tonemap_points,
7328 toneCurve->tonemap_points_cnt * 2);
7329 }
7330
7331 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7332 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7333 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7334 predColorCorrectionGains->gains, 4);
7335 }
7336
7337 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7338 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7339 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7340 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7341 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7342 }
7343
7344 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7345 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7346 }
7347
7348 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7349 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7350 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7351 }
7352
7353 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7354 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7355 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7356 }
7357
7358 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7359 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7360 *effectMode);
7361 if (NAME_NOT_FOUND != val) {
7362 uint8_t fwk_effectMode = (uint8_t)val;
7363 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7364 }
7365 }
7366
7367 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7368 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7369 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7370 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7371 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7372 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7373 }
7374 int32_t fwk_testPatternData[4];
7375 fwk_testPatternData[0] = testPatternData->r;
7376 fwk_testPatternData[3] = testPatternData->b;
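        // Indices 1 and 2 carry the two green channels; whichever green channel sits in the
        // first CFA row (Gr for RGGB/GRBG, Gb for GBRG/BGGR) goes to index 1.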
7377 switch (gCamCapability[mCameraId]->color_arrangement) {
7378 case CAM_FILTER_ARRANGEMENT_RGGB:
7379 case CAM_FILTER_ARRANGEMENT_GRBG:
7380 fwk_testPatternData[1] = testPatternData->gr;
7381 fwk_testPatternData[2] = testPatternData->gb;
7382 break;
7383 case CAM_FILTER_ARRANGEMENT_GBRG:
7384 case CAM_FILTER_ARRANGEMENT_BGGR:
7385 fwk_testPatternData[2] = testPatternData->gr;
7386 fwk_testPatternData[1] = testPatternData->gb;
7387 break;
7388 default:
7389 LOGE("color arrangement %d is not supported",
7390 gCamCapability[mCameraId]->color_arrangement);
7391 break;
7392 }
7393 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7394 }
7395
7396 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7397 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7398 }
7399
7400 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7401 String8 str((const char *)gps_methods);
7402 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7403 }
7404
7405 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7406 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7407 }
7408
7409 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7410 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7411 }
7412
7413 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7414 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7415 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7416 }
7417
7418 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7419 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7420 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7421 }
7422
7423 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7424 int32_t fwk_thumb_size[2];
7425 fwk_thumb_size[0] = thumb_size->width;
7426 fwk_thumb_size[1] = thumb_size->height;
7427 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7428 }
7429
7430 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7431 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7432 privateData,
7433 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7434 }
7435
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007436 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007437 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007438 meteringMode, 1);
7439 }
7440
Thierry Strudel54dc9782017-02-15 12:12:10 -08007441 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7442 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7443 LOGD("hdr_scene_data: %d %f\n",
7444 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7445 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7446 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7447 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7448 &isHdr, 1);
7449 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7450 &isHdrConfidence, 1);
7451 }
7452
Thierry Strudel3d639192016-09-09 11:52:26 -07007455 if (metadata->is_tuning_params_valid) {
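        // Blob layout: six uint32 header fields (data version, then the sensor, VFE, CPP, CAC
        // and mod3 payload sizes) followed by the sensor, VFE, CPP and CAC payloads back to back.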
7456 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7457 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7458 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7459
7460
7461 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7462 sizeof(uint32_t));
7463 data += sizeof(uint32_t);
7464
7465 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7466 sizeof(uint32_t));
7467 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7468 data += sizeof(uint32_t);
7469
7470 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7471 sizeof(uint32_t));
7472 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7473 data += sizeof(uint32_t);
7474
7475 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7476 sizeof(uint32_t));
7477 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7478 data += sizeof(uint32_t);
7479
7480 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7481 sizeof(uint32_t));
7482 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7483 data += sizeof(uint32_t);
7484
7485 metadata->tuning_params.tuning_mod3_data_size = 0;
7486 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7487 sizeof(uint32_t));
7488 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7489 data += sizeof(uint32_t);
7490
7491 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7492 TUNING_SENSOR_DATA_MAX);
7493 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7494 count);
7495 data += count;
7496
7497 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7498 TUNING_VFE_DATA_MAX);
7499 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7500 count);
7501 data += count;
7502
7503 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7504 TUNING_CPP_DATA_MAX);
7505 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7506 count);
7507 data += count;
7508
7509 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7510 TUNING_CAC_DATA_MAX);
7511 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7512 count);
7513 data += count;
7514
7515 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7516 (int32_t *)(void *)tuning_meta_data_blob,
7517 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7518 }
7519
7520 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7521 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7522 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7523 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7524 NEUTRAL_COL_POINTS);
7525 }
7526
7527 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7528 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7529 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7530 }
7531
7532 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7533 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7534 // Adjust crop region from sensor output coordinate system to active
7535 // array coordinate system.
7536 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7537 hAeRegions->rect.width, hAeRegions->rect.height);
7538
7539 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7540 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7541 REGIONS_TUPLE_COUNT);
7542 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7543 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7544 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7545 hAeRegions->rect.height);
7546 }
7547
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007548 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7549 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7550 if (NAME_NOT_FOUND != val) {
7551 uint8_t fwkAfMode = (uint8_t)val;
7552 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7553 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7554 } else {
7555 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7556 val);
7557 }
7558 }
7559
Thierry Strudel3d639192016-09-09 11:52:26 -07007560 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7561 uint8_t fwk_afState = (uint8_t) *afState;
7562 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007563 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007564 }
7565
7566 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7567 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7568 }
7569
7570 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7571 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7572 }
7573
7574 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7575 uint8_t fwk_lensState = *lensState;
7576 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7577 }
7578
Thierry Strudel3d639192016-09-09 11:52:26 -07007579
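    // The framework antibanding enum has no 50Hz/60Hz auto variants, so both HAL auto
    // sub-modes collapse to plain AUTO before the lookup.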
7580 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007581 uint32_t ab_mode = *hal_ab_mode;
7582 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7583 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7584 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007586 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007587 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007588 if (NAME_NOT_FOUND != val) {
7589 uint8_t fwk_ab_mode = (uint8_t)val;
7590 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7591 }
7592 }
7593
7594 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7595 int val = lookupFwkName(SCENE_MODES_MAP,
7596 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7597 if (NAME_NOT_FOUND != val) {
7598 uint8_t fwkBestshotMode = (uint8_t)val;
7599 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7600 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7601 } else {
7602 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7603 }
7604 }
7605
7606 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7607 uint8_t fwk_mode = (uint8_t) *mode;
7608 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7609 }
7610
7611    /* Constant metadata values to be updated */
7612 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7613 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7614
7615 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7616 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7617
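    // The hot pixel map mode is reported as OFF above, so publish an empty (count 0) map entry.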
7618 int32_t hotPixelMap[2];
7619 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7620
7621 // CDS
7622 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7623 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7624 }
7625
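    // Track the sensor (staggered) video HDR state in mCurrFeatureState so that toggles can be
    // logged (PROFILE_META_HDR_TOGGLED) for profiling.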
Thierry Strudel04e026f2016-10-10 11:27:36 -07007626 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7627 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007628 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007629 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7630 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7631 } else {
7632 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7633 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007634
7635 if(fwk_hdr != curr_hdr_state) {
7636 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7637 if(fwk_hdr)
7638 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7639 else
7640 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7641 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007642 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7643 }
7644
Thierry Strudel54dc9782017-02-15 12:12:10 -08007645 //binning correction
7646 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7647 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7648 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7649 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7650 }
7651
Thierry Strudel04e026f2016-10-10 11:27:36 -07007652 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007653 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007654 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7655 int8_t is_ir_on = 0;
7656
7657        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7658 if(is_ir_on != curr_ir_state) {
7659 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7660 if(is_ir_on)
7661 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7662 else
7663 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7664 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007665 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007666 }
7667
Thierry Strudel269c81a2016-10-12 12:13:59 -07007668 // AEC SPEED
7669 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7670 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7671 }
7672
7673 // AWB SPEED
7674 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7675 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7676 }
7677
Thierry Strudel3d639192016-09-09 11:52:26 -07007678 // TNR
7679 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7680 uint8_t tnr_enable = tnr->denoise_enable;
7681 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007682 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7683 int8_t is_tnr_on = 0;
7684
7685        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7686 if(is_tnr_on != curr_tnr_state) {
7687 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7688 if(is_tnr_on)
7689 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7690 else
7691 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7692 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007693
7694 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7695 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7696 }
7697
7698 // Reprocess crop data
7699 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7700 uint8_t cnt = crop_data->num_of_streams;
7701 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7702            // mm-qcamera-daemon only posts crop_data for streams that are
7703            // not linked to pproc, so missing crop metadata is not
7704            // necessarily an error.
7705 LOGD("No valid crop metadata entries");
7706 } else {
7707 uint32_t reproc_stream_id;
7708 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7709 LOGD("No reprocessible stream found, ignore crop data");
7710 } else {
7711 int rc = NO_ERROR;
7712 Vector<int32_t> roi_map;
7713 int32_t *crop = new int32_t[cnt*4];
7714 if (NULL == crop) {
7715 rc = NO_MEMORY;
7716 }
7717 if (NO_ERROR == rc) {
7718 int32_t streams_found = 0;
7719 for (size_t i = 0; i < cnt; i++) {
7720 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7721 if (pprocDone) {
7722 // HAL already does internal reprocessing,
7723 // either via reprocessing before JPEG encoding,
7724 // or offline postprocessing for pproc bypass case.
7725 crop[0] = 0;
7726 crop[1] = 0;
7727 crop[2] = mInputStreamInfo.dim.width;
7728 crop[3] = mInputStreamInfo.dim.height;
7729 } else {
7730 crop[0] = crop_data->crop_info[i].crop.left;
7731 crop[1] = crop_data->crop_info[i].crop.top;
7732 crop[2] = crop_data->crop_info[i].crop.width;
7733 crop[3] = crop_data->crop_info[i].crop.height;
7734 }
7735 roi_map.add(crop_data->crop_info[i].roi_map.left);
7736 roi_map.add(crop_data->crop_info[i].roi_map.top);
7737 roi_map.add(crop_data->crop_info[i].roi_map.width);
7738 roi_map.add(crop_data->crop_info[i].roi_map.height);
7739 streams_found++;
7740 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7741 crop[0], crop[1], crop[2], crop[3]);
7742 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7743 crop_data->crop_info[i].roi_map.left,
7744 crop_data->crop_info[i].roi_map.top,
7745 crop_data->crop_info[i].roi_map.width,
7746 crop_data->crop_info[i].roi_map.height);
7747 break;
7748
7749 }
7750 }
7751 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7752 &streams_found, 1);
7753 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7754 crop, (size_t)(streams_found * 4));
7755 if (roi_map.array()) {
7756 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7757 roi_map.array(), roi_map.size());
7758 }
7759 }
7760 if (crop) {
7761 delete [] crop;
7762 }
7763 }
7764 }
7765 }
7766
7767 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7768        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7769        // so hardcode the CAC result to OFF mode.
7770 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7771 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7772 } else {
7773 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7774 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7775 *cacMode);
7776 if (NAME_NOT_FOUND != val) {
7777 uint8_t resultCacMode = (uint8_t)val;
7778                // Check whether the CAC result from the callback matches the framework-set CAC mode.
7779                // If not, report the CAC mode that came in the corresponding request.
7780 if (fwk_cacMode != resultCacMode) {
7781 resultCacMode = fwk_cacMode;
7782 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007783 //Check if CAC is disabled by property
7784 if (m_cacModeDisabled) {
7785 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7786 }
7787
Thierry Strudel3d639192016-09-09 11:52:26 -07007788 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7789 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7790 } else {
7791 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7792 }
7793 }
7794 }
7795
7796 // Post blob of cam_cds_data through vendor tag.
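    // Only the CDS enable flag of the reprocessible stream is forwarded, collapsed into a
    // single-entry override blob.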
7797 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7798 uint8_t cnt = cdsInfo->num_of_streams;
7799 cam_cds_data_t cdsDataOverride;
7800 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7801 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
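        // Only a single CDS entry is reported: the cds_enable of the reprocessible
        // output stream, looked up below.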
7802 cdsDataOverride.num_of_streams = 1;
7803 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7804 uint32_t reproc_stream_id;
7805 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7806 LOGD("No reprocessible stream found, ignore cds data");
7807 } else {
7808 for (size_t i = 0; i < cnt; i++) {
7809 if (cdsInfo->cds_info[i].stream_id ==
7810 reproc_stream_id) {
7811 cdsDataOverride.cds_info[0].cds_enable =
7812 cdsInfo->cds_info[i].cds_enable;
7813 break;
7814 }
7815 }
7816 }
7817 } else {
7818 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7819 }
7820 camMetadata.update(QCAMERA3_CDS_INFO,
7821 (uint8_t *)&cdsDataOverride,
7822 sizeof(cam_cds_data_t));
7823 }
7824
7825 // Ldaf calibration data
7826 if (!mLdafCalibExist) {
7827 IF_META_AVAILABLE(uint32_t, ldafCalib,
7828 CAM_INTF_META_LDAF_EXIF, metadata) {
7829 mLdafCalibExist = true;
7830 mLdafCalib[0] = ldafCalib[0];
7831 mLdafCalib[1] = ldafCalib[1];
7832 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7833 ldafCalib[0], ldafCalib[1]);
7834 }
7835 }
7836
Thierry Strudel54dc9782017-02-15 12:12:10 -08007837 // EXIF debug data through vendor tag
7838 /*
7839 * Mobicat Mask can assume 3 values:
7840 * 1 refers to Mobicat data,
7841 * 2 refers to Stats Debug and Exif Debug Data
7842 * 3 refers to Mobicat and Stats Debug Data
7843 * We want to make sure that we are sending Exif debug data
7844 * only when Mobicat Mask is 2.
7845 */
7846 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7847 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7848 (uint8_t *)(void *)mExifParams.debug_params,
7849 sizeof(mm_jpeg_debug_exif_params_t));
7850 }
7851
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007852 // Reprocess and DDM debug data through vendor tag
7853 cam_reprocess_info_t repro_info;
7854 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007855 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7856 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007857 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007858 }
7859 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7860 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007861 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007862 }
7863 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7864 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007865 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007866 }
7867 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7868 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007869 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007870 }
7871 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7872 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007873 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007874 }
7875 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007876 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007877 }
7878 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7879 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007880 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007882 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7883 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7884 }
7885 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7886 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7887 }
7888 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7889 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007890
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007891 // INSTANT AEC MODE
7892 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7893 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7894 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7895 }
7896
Shuzhen Wange763e802016-03-31 10:24:29 -07007897 // AF scene change
7898 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7899 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7900 }
7901
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007902 // Enable ZSL
7903 if (enableZsl != nullptr) {
7904 uint8_t value = *enableZsl ?
7905 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7906 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7907 }
7908
Xu Han821ea9c2017-05-23 09:00:40 -07007909 // OIS Data
7910 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
7911 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
7912 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
7913 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
7914 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
7915 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
7916 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
7917 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
7918 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
7919 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
7920 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
7921 }
7922
Thierry Strudel3d639192016-09-09 11:52:26 -07007923 resultMetadata = camMetadata.release();
7924 return resultMetadata;
7925}
7926
7927/*===========================================================================
7928 * FUNCTION : saveExifParams
7929 *
7930 * DESCRIPTION: Save 3A/stats EXIF debug parameters from the metadata callback into mExifParams
7931 *
7932 * PARAMETERS :
7933 * @metadata : metadata information from callback
7934 *
7935 * RETURN : none
7936 *
7937 *==========================================================================*/
7938void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7939{
7940 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7941 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7942 if (mExifParams.debug_params) {
7943 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7944 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7945 }
7946 }
7947 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7948 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7949 if (mExifParams.debug_params) {
7950 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7951 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7952 }
7953 }
7954 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7955 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7956 if (mExifParams.debug_params) {
7957 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7958 mExifParams.debug_params->af_debug_params_valid = TRUE;
7959 }
7960 }
7961 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7962 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7963 if (mExifParams.debug_params) {
7964 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7965 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7966 }
7967 }
7968 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7969 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7970 if (mExifParams.debug_params) {
7971 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7972 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7973 }
7974 }
7975 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7976 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7977 if (mExifParams.debug_params) {
7978 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7979 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7980 }
7981 }
7982 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7983 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7984 if (mExifParams.debug_params) {
7985 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7986 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7987 }
7988 }
7989 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7990 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7991 if (mExifParams.debug_params) {
7992 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7993 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7994 }
7995 }
7996}
7997
7998/*===========================================================================
7999 * FUNCTION : get3AExifParams
8000 *
8001 * DESCRIPTION: Return the cached 3A EXIF parameters
8002 *
8003 * PARAMETERS : none
8004 *
8005 *
8006 * RETURN : mm_jpeg_exif_params_t
8007 *
8008 *==========================================================================*/
8009mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8010{
8011 return mExifParams;
8012}
8013
8014/*===========================================================================
8015 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8016 *
8017 * DESCRIPTION: Translate urgent (partial result) metadata from the HAL callback
 *              into framework result metadata
8018 *
8019 * PARAMETERS :
8020 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008021 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8022 * urgent metadata in a batch. Always true for
8023 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008024 *
8025 * RETURN : camera_metadata_t*
8026 * metadata in a format specified by fwk
8027 *==========================================================================*/
8028camera_metadata_t*
8029QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008030 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008031{
8032 CameraMetadata camMetadata;
8033 camera_metadata_t *resultMetadata;
8034
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008035 if (!lastUrgentMetadataInBatch) {
8036 /* In batch mode, use empty metadata if this is not the last in batch
8037 */
8038 resultMetadata = allocate_camera_metadata(0, 0);
8039 return resultMetadata;
8040 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008041
8042 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8043 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8044 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8045 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8046 }
8047
8048 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8049 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8050 &aecTrigger->trigger, 1);
8051 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8052 &aecTrigger->trigger_id, 1);
8053 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8054 aecTrigger->trigger);
8055 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8056 aecTrigger->trigger_id);
8057 }
8058
8059 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8060 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8061 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8062 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8063 }
8064
Thierry Strudel3d639192016-09-09 11:52:26 -07008065 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8066 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8067 &af_trigger->trigger, 1);
8068 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8069 af_trigger->trigger);
8070 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8071 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8072 af_trigger->trigger_id);
8073 }
8074
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008075 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8076 /*af regions*/
8077 int32_t afRegions[REGIONS_TUPLE_COUNT];
8078        // Adjust the AF region from the sensor output coordinate system to the
8079        // active array coordinate system.
8080 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8081 hAfRegions->rect.width, hAfRegions->rect.height);
8082
8083 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8084 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8085 REGIONS_TUPLE_COUNT);
8086 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8087 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8088 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8089 hAfRegions->rect.height);
8090 }
8091
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008092 // AF region confidence
8093 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8094 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8095 }
8096
Thierry Strudel3d639192016-09-09 11:52:26 -07008097 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8098 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8099 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8100 if (NAME_NOT_FOUND != val) {
8101 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8102 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8103 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8104 } else {
8105 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8106 }
8107 }
8108
8109 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8110 uint32_t aeMode = CAM_AE_MODE_MAX;
8111 int32_t flashMode = CAM_FLASH_MODE_MAX;
8112 int32_t redeye = -1;
8113 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8114 aeMode = *pAeMode;
8115 }
8116 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8117 flashMode = *pFlashMode;
8118 }
8119 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8120 redeye = *pRedeye;
8121 }
8122
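    // Derive ANDROID_CONTROL_AE_MODE from the HAL hints, in priority order:
    // red-eye reduction, then flash mode (auto/on), then plain AE on/off, then
    // external flash.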
8123 if (1 == redeye) {
8124 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8125 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8126 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8127 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8128 flashMode);
8129 if (NAME_NOT_FOUND != val) {
8130 fwk_aeMode = (uint8_t)val;
8131 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8132 } else {
8133 LOGE("Unsupported flash mode %d", flashMode);
8134 }
8135 } else if (aeMode == CAM_AE_MODE_ON) {
8136 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8137 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8138 } else if (aeMode == CAM_AE_MODE_OFF) {
8139 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8140 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008141 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8142 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8143 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008144 } else {
8145 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8146 "flashMode:%d, aeMode:%u!!!",
8147 redeye, flashMode, aeMode);
8148 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008149 if (mInstantAEC) {
8150        // Increment the frame index count until a bound is reached for instant AEC.
8151 mInstantAecFrameIdxCount++;
8152 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8153 CAM_INTF_META_AEC_INFO, metadata) {
8154 LOGH("ae_params->settled = %d",ae_params->settled);
8155            // If AEC has settled, or the number of frames has reached the bound,
8156            // reset instant AEC.
8157 if (ae_params->settled ||
8158 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8159 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8160 mInstantAEC = false;
8161 mResetInstantAEC = true;
8162 mInstantAecFrameIdxCount = 0;
8163 }
8164 }
8165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008166 resultMetadata = camMetadata.release();
8167 return resultMetadata;
8168}
8169
8170/*===========================================================================
8171 * FUNCTION : dumpMetadataToFile
8172 *
8173 * DESCRIPTION: Dumps tuning metadata to file system
8174 *
8175 * PARAMETERS :
8176 * @meta : tuning metadata
8177 * @dumpFrameCount : current dump frame count
8178 * @enabled : flag to enable dumping
 * @type : type string used in the dump file name
 * @frameNumber : frame number used in the dump file name
8179 *
8180 *==========================================================================*/
8181void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8182 uint32_t &dumpFrameCount,
8183 bool enabled,
8184 const char *type,
8185 uint32_t frameNumber)
8186{
8187 //Some sanity checks
8188 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8189 LOGE("Tuning sensor data size bigger than expected %d: %d",
8190 meta.tuning_sensor_data_size,
8191 TUNING_SENSOR_DATA_MAX);
8192 return;
8193 }
8194
8195 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8196 LOGE("Tuning VFE data size bigger than expected %d: %d",
8197 meta.tuning_vfe_data_size,
8198 TUNING_VFE_DATA_MAX);
8199 return;
8200 }
8201
8202 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8203 LOGE("Tuning CPP data size bigger than expected %d: %d",
8204 meta.tuning_cpp_data_size,
8205 TUNING_CPP_DATA_MAX);
8206 return;
8207 }
8208
8209 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8210 LOGE("Tuning CAC data size bigger than expected %d: %d",
8211 meta.tuning_cac_data_size,
8212 TUNING_CAC_DATA_MAX);
8213 return;
8214 }
8215 //
8216
8217 if(enabled){
8218 char timeBuf[FILENAME_MAX];
8219 char buf[FILENAME_MAX];
8220 memset(buf, 0, sizeof(buf));
8221 memset(timeBuf, 0, sizeof(timeBuf));
8222 time_t current_time;
8223 struct tm * timeinfo;
8224 time (&current_time);
8225 timeinfo = localtime (&current_time);
8226 if (timeinfo != NULL) {
8227 strftime (timeBuf, sizeof(timeBuf),
8228 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8229 }
8230 String8 filePath(timeBuf);
8231 snprintf(buf,
8232 sizeof(buf),
8233 "%dm_%s_%d.bin",
8234 dumpFrameCount,
8235 type,
8236 frameNumber);
8237 filePath.append(buf);
8238 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8239 if (file_fd >= 0) {
8240 ssize_t written_len = 0;
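            // Dump file layout: tuning_data_version, the five section sizes (sensor,
            // VFE, CPP, CAC, mod3), then the sensor/VFE/CPP/CAC payloads taken from
            // their fixed offsets in meta.data.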
8241 meta.tuning_data_version = TUNING_DATA_VERSION;
8242 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8243 written_len += write(file_fd, data, sizeof(uint32_t));
8244 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8245 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8246 written_len += write(file_fd, data, sizeof(uint32_t));
8247 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8248 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8249 written_len += write(file_fd, data, sizeof(uint32_t));
8250 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8251 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8252 written_len += write(file_fd, data, sizeof(uint32_t));
8253 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8254 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8255 written_len += write(file_fd, data, sizeof(uint32_t));
8256 meta.tuning_mod3_data_size = 0;
8257 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8258 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8259 written_len += write(file_fd, data, sizeof(uint32_t));
8260 size_t total_size = meta.tuning_sensor_data_size;
8261 data = (void *)((uint8_t *)&meta.data);
8262 written_len += write(file_fd, data, total_size);
8263 total_size = meta.tuning_vfe_data_size;
8264 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8265 written_len += write(file_fd, data, total_size);
8266 total_size = meta.tuning_cpp_data_size;
8267 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8268 written_len += write(file_fd, data, total_size);
8269 total_size = meta.tuning_cac_data_size;
8270 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8271 written_len += write(file_fd, data, total_size);
8272 close(file_fd);
8273        } else {
8274            LOGE("failed to open file for metadata dumping");
8275 }
8276 }
8277}
8278
8279/*===========================================================================
8280 * FUNCTION : cleanAndSortStreamInfo
8281 *
8282 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8283 * and sort them so that raw streams are at the end of the list.
8284 * This is a workaround for a camera daemon constraint.
8285 *
8286 * PARAMETERS : None
8287 *
8288 *==========================================================================*/
8289void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8290{
8291 List<stream_info_t *> newStreamInfo;
8292
8293 /*clean up invalid streams*/
8294 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8295 it != mStreamInfo.end();) {
8296 if(((*it)->status) == INVALID){
8297 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8298 delete channel;
8299 free(*it);
8300 it = mStreamInfo.erase(it);
8301 } else {
8302 it++;
8303 }
8304 }
8305
8306 // Move preview/video/callback/snapshot streams into newList
8307 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8308 it != mStreamInfo.end();) {
8309 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8310 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8311 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8312 newStreamInfo.push_back(*it);
8313 it = mStreamInfo.erase(it);
8314 } else
8315 it++;
8316 }
8317 // Move raw streams into newList
8318 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8319 it != mStreamInfo.end();) {
8320 newStreamInfo.push_back(*it);
8321 it = mStreamInfo.erase(it);
8322 }
8323
8324 mStreamInfo = newStreamInfo;
8325}
8326
8327/*===========================================================================
8328 * FUNCTION : extractJpegMetadata
8329 *
8330 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8331 * JPEG metadata is cached in the HAL and returned as part of the capture
8332 * result when metadata is returned from the camera daemon.
8333 *
8334 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8335 * @request: capture request
8336 *
8337 *==========================================================================*/
8338void QCamera3HardwareInterface::extractJpegMetadata(
8339 CameraMetadata& jpegMetadata,
8340 const camera3_capture_request_t *request)
8341{
8342 CameraMetadata frame_settings;
8343 frame_settings = request->settings;
8344
8345 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8346 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8347 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8348 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8349
8350 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8351 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8352 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8353 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8354
8355 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8356 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8357 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8358 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8359
8360 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8361 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8362 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8363 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8364
8365 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8366 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8367 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8368 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8369
8370 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8371 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8372 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8373 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8374
8375 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8376 int32_t thumbnail_size[2];
8377 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8378 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8379 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8380 int32_t orientation =
8381 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008382 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008383 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8384 int32_t temp;
8385 temp = thumbnail_size[0];
8386 thumbnail_size[0] = thumbnail_size[1];
8387 thumbnail_size[1] = temp;
8388 }
8389 }
8390 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8391 thumbnail_size,
8392 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8393 }
8394
8395}
8396
8397/*===========================================================================
8398 * FUNCTION : convertToRegions
8399 *
8400 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8401 *
8402 * PARAMETERS :
8403 * @rect : cam_rect_t struct to convert
8404 * @region : int32_t destination array
8405 * @weight : if we are converting from cam_area_t, weight is valid
8406 * else weight = -1
8407 *
8408 *==========================================================================*/
8409void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8410 int32_t *region, int weight)
8411{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008412 region[FACE_LEFT] = rect.left;
8413 region[FACE_TOP] = rect.top;
8414 region[FACE_RIGHT] = rect.left + rect.width;
8415 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008416 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008417 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008418 }
8419}
8420
8421/*===========================================================================
8422 * FUNCTION : convertFromRegions
8423 *
8424 * DESCRIPTION: helper method to convert a framework metadata region tag into cam_area_t
8425 *
8426 * PARAMETERS :
8427 * @roi : cam_area_t destination
8428 * @frame_settings : framework capture request settings
8429 * @tag : metadata tag whose data is laid out as [x_min, y_min, x_max, y_max, weight]
8431 *
8432 *==========================================================================*/
8433void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008434 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008435{
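    // The tag's data is laid out as [x_min, y_min, x_max, y_max, weight].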
Thierry Strudel3d639192016-09-09 11:52:26 -07008436 int32_t x_min = frame_settings.find(tag).data.i32[0];
8437 int32_t y_min = frame_settings.find(tag).data.i32[1];
8438 int32_t x_max = frame_settings.find(tag).data.i32[2];
8439 int32_t y_max = frame_settings.find(tag).data.i32[3];
8440 roi.weight = frame_settings.find(tag).data.i32[4];
8441 roi.rect.left = x_min;
8442 roi.rect.top = y_min;
8443 roi.rect.width = x_max - x_min;
8444 roi.rect.height = y_max - y_min;
8445}
8446
8447/*===========================================================================
8448 * FUNCTION : resetIfNeededROI
8449 *
8450 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8451 * crop region
8452 *
8453 * PARAMETERS :
8454 * @roi : cam_area_t struct to resize
8455 * @scalerCropRegion : cam_crop_region_t region to compare against
8456 *
8457 *
8458 *==========================================================================*/
8459bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8460 const cam_crop_region_t* scalerCropRegion)
8461{
8462 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8463 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8464 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8465 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8466
8467    /* According to the spec, weight = 0 indicates the ROI should be disabled.
8468     * Without this check, the validation below (whether the ROI lies inside the
8469     * scaler crop region) would fail, the ROI would not be reset, and the
8470     * algorithm would keep using a stale ROI window.
8471     */
8472 if (roi->weight == 0) {
8473 return true;
8474 }
8475
8476    if ((roi_x_max < scalerCropRegion->left) ||
8477        // right edge of roi window is left of scaler crop's left edge
8478        (roi_y_max < scalerCropRegion->top) ||
8479        // bottom edge of roi window is above scaler crop's top edge
8480        (roi->rect.left > crop_x_max) ||
8481        // left edge of roi window is right of scaler crop's right edge
8482        (roi->rect.top > crop_y_max)){
8483        // top edge of roi window is below scaler crop's bottom edge
8484 return false;
8485 }
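    // Clamp the ROI to the scaler crop region boundaries.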
8486 if (roi->rect.left < scalerCropRegion->left) {
8487 roi->rect.left = scalerCropRegion->left;
8488 }
8489 if (roi->rect.top < scalerCropRegion->top) {
8490 roi->rect.top = scalerCropRegion->top;
8491 }
8492 if (roi_x_max > crop_x_max) {
8493 roi_x_max = crop_x_max;
8494 }
8495 if (roi_y_max > crop_y_max) {
8496 roi_y_max = crop_y_max;
8497 }
8498 roi->rect.width = roi_x_max - roi->rect.left;
8499 roi->rect.height = roi_y_max - roi->rect.top;
8500 return true;
8501}
8502
8503/*===========================================================================
8504 * FUNCTION : convertLandmarks
8505 *
8506 * DESCRIPTION: helper method to extract the landmarks from face detection info
8507 *
8508 * PARAMETERS :
8509 * @landmark_data : input landmark data to be converted
8510 * @landmarks : int32_t destination array
8511 *
8512 *
8513 *==========================================================================*/
8514void QCamera3HardwareInterface::convertLandmarks(
8515 cam_face_landmarks_info_t landmark_data,
8516 int32_t *landmarks)
8517{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008518 if (landmark_data.is_left_eye_valid) {
8519 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8520 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8521 } else {
8522 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8523 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8524 }
8525
8526 if (landmark_data.is_right_eye_valid) {
8527 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8528 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8529 } else {
8530 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8531 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8532 }
8533
8534 if (landmark_data.is_mouth_valid) {
8535 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8536 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8537 } else {
8538 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8539 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8540 }
8541}
8542
8543/*===========================================================================
8544 * FUNCTION : setInvalidLandmarks
8545 *
8546 * DESCRIPTION: helper method to set invalid landmarks
8547 *
8548 * PARAMETERS :
8549 * @landmarks : int32_t destination array
8550 *
8551 *
8552 *==========================================================================*/
8553void QCamera3HardwareInterface::setInvalidLandmarks(
8554 int32_t *landmarks)
8555{
8556 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8557 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8558 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8559 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8560 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8561 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008562}
8563
8564#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008565
8566/*===========================================================================
8567 * FUNCTION : getCapabilities
8568 *
8569 * DESCRIPTION: query camera capability from back-end
8570 *
8571 * PARAMETERS :
8572 * @ops : mm-interface ops structure
8573 * @cam_handle : camera handle for which we need capability
8574 *
8575 * RETURN : ptr type of capability structure
8576 * capability for success
8577 * NULL for failure
8578 *==========================================================================*/
8579cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8580 uint32_t cam_handle)
8581{
8582 int rc = NO_ERROR;
8583 QCamera3HeapMemory *capabilityHeap = NULL;
8584 cam_capability_t *cap_ptr = NULL;
8585
8586 if (ops == NULL) {
8587 LOGE("Invalid arguments");
8588 return NULL;
8589 }
8590
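    // Allocate and map a shared capability buffer, have the backend fill it via
    // query_capability(), then copy the result into a heap-allocated
    // cam_capability_t that is returned to the caller.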
8591 capabilityHeap = new QCamera3HeapMemory(1);
8592 if (capabilityHeap == NULL) {
8593 LOGE("creation of capabilityHeap failed");
8594 return NULL;
8595 }
8596
8597 /* Allocate memory for capability buffer */
8598 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8599 if(rc != OK) {
8600 LOGE("No memory for cappability");
8601 goto allocate_failed;
8602 }
8603
8604 /* Map memory for capability buffer */
8605 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8606
8607 rc = ops->map_buf(cam_handle,
8608 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8609 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8610 if(rc < 0) {
8611 LOGE("failed to map capability buffer");
8612 rc = FAILED_TRANSACTION;
8613 goto map_failed;
8614 }
8615
8616 /* Query Capability */
8617 rc = ops->query_capability(cam_handle);
8618 if(rc < 0) {
8619 LOGE("failed to query capability");
8620 rc = FAILED_TRANSACTION;
8621 goto query_failed;
8622 }
8623
8624 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8625 if (cap_ptr == NULL) {
8626 LOGE("out of memory");
8627 rc = NO_MEMORY;
8628 goto query_failed;
8629 }
8630
8631 memset(cap_ptr, 0, sizeof(cam_capability_t));
8632 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8633
8634 int index;
8635 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8636 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8637 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8638 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8639 }
8640
8641query_failed:
8642 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8643map_failed:
8644 capabilityHeap->deallocate();
8645allocate_failed:
8646 delete capabilityHeap;
8647
8648 if (rc != NO_ERROR) {
8649 return NULL;
8650 } else {
8651 return cap_ptr;
8652 }
8653}
8654
Thierry Strudel3d639192016-09-09 11:52:26 -07008655/*===========================================================================
8656 * FUNCTION : initCapabilities
8657 *
8658 * DESCRIPTION: initialize camera capabilities in static data struct
8659 *
8660 * PARAMETERS :
8661 * @cameraId : camera Id
8662 *
8663 * RETURN : int32_t type of status
8664 * NO_ERROR -- success
8665 * none-zero failure code
8666 *==========================================================================*/
8667int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8668{
8669 int rc = 0;
8670 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008671 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008672
8673 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8674 if (rc) {
8675 LOGE("camera_open failed. rc = %d", rc);
8676 goto open_failed;
8677 }
8678 if (!cameraHandle) {
8679 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8680 goto open_failed;
8681 }
8682
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008683 handle = get_main_camera_handle(cameraHandle->camera_handle);
8684 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8685 if (gCamCapability[cameraId] == NULL) {
8686 rc = FAILED_TRANSACTION;
8687 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008688 }
8689
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008690 gCamCapability[cameraId]->camera_index = cameraId;
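    // For dual camera setups, also query the aux camera capability and keep a
    // separate copy of the main camera capability.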
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008691 if (is_dual_camera_by_idx(cameraId)) {
8692 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8693 gCamCapability[cameraId]->aux_cam_cap =
8694 getCapabilities(cameraHandle->ops, handle);
8695 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8696 rc = FAILED_TRANSACTION;
8697 free(gCamCapability[cameraId]);
8698 goto failed_op;
8699 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008700
8701 // Copy the main camera capability to main_cam_cap struct
8702 gCamCapability[cameraId]->main_cam_cap =
8703 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8704 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8705 LOGE("out of memory");
8706 rc = NO_MEMORY;
8707 goto failed_op;
8708 }
8709 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8710 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008711 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008712failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008713 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8714 cameraHandle = NULL;
8715open_failed:
8716 return rc;
8717}
8718
8719/*==========================================================================
8720 * FUNCTION : get3Aversion
8721 *
8722 * DESCRIPTION: get the Q3A S/W version
8723 *
8724 * PARAMETERS :
8725 * @sw_version: Reference of Q3A structure which will hold version info upon
8726 * return
8727 *
8728 * RETURN : None
8729 *
8730 *==========================================================================*/
8731void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8732{
8733 if(gCamCapability[mCameraId])
8734 sw_version = gCamCapability[mCameraId]->q3a_version;
8735 else
8736 LOGE("Capability structure NULL!");
8737}
8738
8739
8740/*===========================================================================
8741 * FUNCTION : initParameters
8742 *
8743 * DESCRIPTION: initialize camera parameters
8744 *
8745 * PARAMETERS :
8746 *
8747 * RETURN : int32_t type of status
8748 * NO_ERROR -- success
8749 * none-zero failure code
8750 *==========================================================================*/
8751int QCamera3HardwareInterface::initParameters()
8752{
8753 int rc = 0;
8754
8755 //Allocate Set Param Buffer
8756 mParamHeap = new QCamera3HeapMemory(1);
8757 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8758 if(rc != OK) {
8759 rc = NO_MEMORY;
8760 LOGE("Failed to allocate SETPARM Heap memory");
8761 delete mParamHeap;
8762 mParamHeap = NULL;
8763 return rc;
8764 }
8765
8766 //Map memory for parameters buffer
8767 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8768 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8769 mParamHeap->getFd(0),
8770 sizeof(metadata_buffer_t),
8771 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8772 if(rc < 0) {
8773 LOGE("failed to map SETPARM buffer");
8774 rc = FAILED_TRANSACTION;
8775 mParamHeap->deallocate();
8776 delete mParamHeap;
8777 mParamHeap = NULL;
8778 return rc;
8779 }
8780
8781 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8782
8783 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8784 return rc;
8785}
8786
8787/*===========================================================================
8788 * FUNCTION : deinitParameters
8789 *
8790 * DESCRIPTION: de-initialize camera parameters
8791 *
8792 * PARAMETERS :
8793 *
8794 * RETURN : NONE
8795 *==========================================================================*/
8796void QCamera3HardwareInterface::deinitParameters()
8797{
8798 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8799 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8800
8801 mParamHeap->deallocate();
8802 delete mParamHeap;
8803 mParamHeap = NULL;
8804
8805 mParameters = NULL;
8806
8807 free(mPrevParameters);
8808 mPrevParameters = NULL;
8809}
8810
8811/*===========================================================================
8812 * FUNCTION : calcMaxJpegSize
8813 *
8814 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8815 *
8816 * PARAMETERS :
 * @camera_id : camera Id
8817 *
8818 * RETURN : max_jpeg_size
8819 *==========================================================================*/
8820size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8821{
8822 size_t max_jpeg_size = 0;
8823 size_t temp_width, temp_height;
8824 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8825 MAX_SIZES_CNT);
8826 for (size_t i = 0; i < count; i++) {
8827 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8828 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8829 if (temp_width * temp_height > max_jpeg_size ) {
8830 max_jpeg_size = temp_width * temp_height;
8831 }
8832 }
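    // Budget a YUV420-sized buffer (3/2 bytes per pixel) for the largest picture
    // size, plus room for the camera3 JPEG blob trailer.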
8833 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8834 return max_jpeg_size;
8835}
8836
8837/*===========================================================================
8838 * FUNCTION : getMaxRawSize
8839 *
8840 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8841 *
8842 * PARAMETERS :
 * @camera_id : camera Id
8843 *
8844 * RETURN : Largest supported Raw Dimension
8845 *==========================================================================*/
8846cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8847{
8848 int max_width = 0;
8849 cam_dimension_t maxRawSize;
8850
8851 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8852 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8853 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8854 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8855 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8856 }
8857 }
8858 return maxRawSize;
8859}
8860
8861
8862/*===========================================================================
8863 * FUNCTION : calcMaxJpegDim
8864 *
8865 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8866 *
8867 * PARAMETERS :
8868 *
8869 * RETURN : max_jpeg_dim
8870 *==========================================================================*/
8871cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8872{
8873 cam_dimension_t max_jpeg_dim;
8874 cam_dimension_t curr_jpeg_dim;
8875 max_jpeg_dim.width = 0;
8876 max_jpeg_dim.height = 0;
8877 curr_jpeg_dim.width = 0;
8878 curr_jpeg_dim.height = 0;
8879 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8880 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8881 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8882 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8883 max_jpeg_dim.width * max_jpeg_dim.height ) {
8884 max_jpeg_dim.width = curr_jpeg_dim.width;
8885 max_jpeg_dim.height = curr_jpeg_dim.height;
8886 }
8887 }
8888 return max_jpeg_dim;
8889}
8890
8891/*===========================================================================
8892 * FUNCTION : addStreamConfig
8893 *
8894 * DESCRIPTION: adds the stream configuration to the array
8895 *
8896 * PARAMETERS :
8897 * @available_stream_configs : pointer to stream configuration array
8898 * @scalar_format : scalar format
8899 * @dim : configuration dimension
8900 * @config_type : input or output configuration type
8901 *
8902 * RETURN : NONE
8903 *==========================================================================*/
8904void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8905 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8906{
8907 available_stream_configs.add(scalar_format);
8908 available_stream_configs.add(dim.width);
8909 available_stream_configs.add(dim.height);
8910 available_stream_configs.add(config_type);
8911}
8912
8913/*===========================================================================
8914 * FUNCTION : suppportBurstCapture
8915 *
8916 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8917 *
8918 * PARAMETERS :
8919 * @cameraId : camera Id
8920 *
8921 * RETURN : true if camera supports BURST_CAPTURE
8922 * false otherwise
8923 *==========================================================================*/
8924bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8925{
8926 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8927 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8928 const int32_t highResWidth = 3264;
8929 const int32_t highResHeight = 2448;
8930
8931 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8932 // Maximum resolution images cannot be captured at >= 10fps
8933 // -> not supporting BURST_CAPTURE
8934 return false;
8935 }
8936
8937 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8938 // Maximum resolution images can be captured at >= 20fps
8939 // --> supporting BURST_CAPTURE
8940 return true;
8941 }
8942
8943 // Find the smallest highRes resolution, or largest resolution if there is none
8944 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8945 MAX_SIZES_CNT);
8946 size_t highRes = 0;
8947 while ((highRes + 1 < totalCnt) &&
8948 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8949 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8950 highResWidth * highResHeight)) {
8951 highRes++;
8952 }
8953 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8954 return true;
8955 } else {
8956 return false;
8957 }
8958}
8959
8960/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008961 * FUNCTION : getPDStatIndex
8962 *
8963 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8964 *
8965 * PARAMETERS :
8966 * @caps : camera capabilities
8967 *
8968 * RETURN : int32_t type
8969 * non-negative - on success
8970 * -1 - on failure
8971 *==========================================================================*/
8972int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8973 if (nullptr == caps) {
8974 return -1;
8975 }
8976
8977 uint32_t metaRawCount = caps->meta_raw_channel_count;
8978 int32_t ret = -1;
8979 for (size_t i = 0; i < metaRawCount; i++) {
8980 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8981 ret = i;
8982 break;
8983 }
8984 }
8985
8986 return ret;
8987}
8988
8989/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008990 * FUNCTION : initStaticMetadata
8991 *
8992 * DESCRIPTION: initialize the static metadata
8993 *
8994 * PARAMETERS :
8995 * @cameraId : camera Id
8996 *
8997 * RETURN : int32_t type of status
8998 * 0 -- success
8999 * non-zero failure code
9000 *==========================================================================*/
9001int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9002{
9003 int rc = 0;
9004 CameraMetadata staticInfo;
9005 size_t count = 0;
9006 bool limitedDevice = false;
9007 char prop[PROPERTY_VALUE_MAX];
9008 bool supportBurst = false;
9009
9010 supportBurst = supportBurstCapture(cameraId);
9011
9012    /* If the sensor is a YUV or mono sensor (no raw support), if per-frame control
9013     * is not guaranteed, or if the min fps at max resolution is less than 20 fps, the
9014     * device is advertised as LIMITED */
9015 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9016 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9017 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9018 !supportBurst;
9019
9020 uint8_t supportedHwLvl = limitedDevice ?
9021 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009022#ifndef USE_HAL_3_3
9023 // LEVEL_3 - This device will support level 3.
9024 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9025#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009026 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009027#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009028
9029 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9030 &supportedHwLvl, 1);
9031
9032 bool facingBack = false;
9033 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9034 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9035 facingBack = true;
9036 }
9037 /*HAL 3 only*/
9038 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9039 &gCamCapability[cameraId]->min_focus_distance, 1);
9040
9041 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9042 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9043
9044 /*should be using focal lengths but sensor doesn't provide that info now*/
9045 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9046 &gCamCapability[cameraId]->focal_length,
9047 1);
9048
9049 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9050 gCamCapability[cameraId]->apertures,
9051 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9052
9053 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9054 gCamCapability[cameraId]->filter_densities,
9055 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9056
9057
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009058 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9059 size_t mode_count =
9060 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9061 for (size_t i = 0; i < mode_count; i++) {
9062 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9063 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009064 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009065 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009066
9067 int32_t lens_shading_map_size[] = {
9068 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9069 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9070 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9071 lens_shading_map_size,
9072 sizeof(lens_shading_map_size)/sizeof(int32_t));
9073
9074 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9075 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9076
9077 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9078 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9079
9080 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9081 &gCamCapability[cameraId]->max_frame_duration, 1);
9082
9083 camera_metadata_rational baseGainFactor = {
9084 gCamCapability[cameraId]->base_gain_factor.numerator,
9085 gCamCapability[cameraId]->base_gain_factor.denominator};
9086 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9087 &baseGainFactor, 1);
9088
9089 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9090 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9091
9092 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9093 gCamCapability[cameraId]->pixel_array_size.height};
9094 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9095 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9096
9097 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9098 gCamCapability[cameraId]->active_array_size.top,
9099 gCamCapability[cameraId]->active_array_size.width,
9100 gCamCapability[cameraId]->active_array_size.height};
9101 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9102 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9103
9104 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9105 &gCamCapability[cameraId]->white_level, 1);
9106
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009107 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9108 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9109 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009110 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009111 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009112
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009113#ifndef USE_HAL_3_3
9114 bool hasBlackRegions = false;
9115 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9116 LOGW("black_region_count: %d is bounded to %d",
9117 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9118 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9119 }
9120 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9121 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9122 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9123 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9124 }
9125 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9126 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9127 hasBlackRegions = true;
9128 }
9129#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009130 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9131 &gCamCapability[cameraId]->flash_charge_duration, 1);
9132
9133 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9134 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9135
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009136 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9137 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9138 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009139 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9140 &timestampSource, 1);
9141
Thierry Strudel54dc9782017-02-15 12:12:10 -08009142 //update histogram vendor data
9143 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009144 &gCamCapability[cameraId]->histogram_size, 1);
9145
Thierry Strudel54dc9782017-02-15 12:12:10 -08009146 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009147 &gCamCapability[cameraId]->max_histogram_count, 1);
9148
Shuzhen Wang14415f52016-11-16 18:26:18 -08009149 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9150 //so that app can request fewer number of bins than the maximum supported.
9151 std::vector<int32_t> histBins;
9152 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9153 histBins.push_back(maxHistBins);
9154 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9155 (maxHistBins & 0x1) == 0) {
9156 histBins.push_back(maxHistBins >> 1);
9157 maxHistBins >>= 1;
9158 }
9159 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9160 histBins.data(), histBins.size());
9161
Thierry Strudel3d639192016-09-09 11:52:26 -07009162 int32_t sharpness_map_size[] = {
9163 gCamCapability[cameraId]->sharpness_map_size.width,
9164 gCamCapability[cameraId]->sharpness_map_size.height};
9165
9166 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9167 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9168
9169 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9170 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9171
Emilian Peev0f3c3162017-03-15 12:57:46 +00009172 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9173 if (0 <= indexPD) {
9174 // Advertise PD stats data as part of the Depth capabilities
9175 int32_t depthWidth =
9176 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9177 int32_t depthHeight =
9178 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
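        // The PD meta buffer carries 2 bytes per pixel; dividing the total byte count by 16
        // yields the depth sample count advertised below.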
9179 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9180 assert(0 < depthSamplesCount);
9181 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9182 &depthSamplesCount, 1);
9183
9184 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9185 depthHeight,
9186 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9187 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9188 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9189 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9190 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9191
9192 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9193 depthHeight, 33333333,
9194 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9195 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9196 depthMinDuration,
9197 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9198
9199 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9200 depthHeight, 0,
9201 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9202 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9203 depthStallDuration,
9204 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9205
9206 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9207 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9208 }
9209
Thierry Strudel3d639192016-09-09 11:52:26 -07009210 int32_t scalar_formats[] = {
9211 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9212 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9213 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9214 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9215 HAL_PIXEL_FORMAT_RAW10,
9216 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009217 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9218 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9219 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009220
9221 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9222 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9223 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9224 count, MAX_SIZES_CNT, available_processed_sizes);
9225 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9226 available_processed_sizes, count * 2);
9227
9228 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9229 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9230 makeTable(gCamCapability[cameraId]->raw_dim,
9231 count, MAX_SIZES_CNT, available_raw_sizes);
9232 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9233 available_raw_sizes, count * 2);
9234
9235 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9236 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9237 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9238 count, MAX_SIZES_CNT, available_fps_ranges);
9239 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9240 available_fps_ranges, count * 2);
9241
9242 camera_metadata_rational exposureCompensationStep = {
9243 gCamCapability[cameraId]->exp_compensation_step.numerator,
9244 gCamCapability[cameraId]->exp_compensation_step.denominator};
9245 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9246 &exposureCompensationStep, 1);
9247
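    // Video stabilization: OFF is always advertised; ON is added below only for the back camera
    // when the sensor supports EIS 2.0/3.0 and persist.camera.eis.enable is set.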
9248 Vector<uint8_t> availableVstabModes;
9249 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9250 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009251 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009252 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009253 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009254 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009255 count = IS_TYPE_MAX;
9256 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9257 for (size_t i = 0; i < count; i++) {
9258 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9259 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9260 eisSupported = true;
9261 break;
9262 }
9263 }
9264 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009265 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9266 }
9267 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9268 availableVstabModes.array(), availableVstabModes.size());
9269
9270 /*HAL 1 and HAL 3 common*/
9271 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9272 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9273 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009274 // Cap the max zoom to the max preferred value
9275 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009276 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9277 &maxZoom, 1);
9278
9279 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9280 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9281
9282 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9283 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9284 max3aRegions[2] = 0; /* AF not supported */
9285 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9286 max3aRegions, 3);
9287
9288 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9289 memset(prop, 0, sizeof(prop));
9290 property_get("persist.camera.facedetect", prop, "1");
9291 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9292 LOGD("Support face detection mode: %d",
9293 supportedFaceDetectMode);
9294
9295 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009296 /* supported face detect mode should be OFF if the max number of faces is 0 */
9297 if (maxFaces <= 0) {
9298 supportedFaceDetectMode = 0;
9299 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009300 Vector<uint8_t> availableFaceDetectModes;
9301 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9302 if (supportedFaceDetectMode == 1) {
9303 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9304 } else if (supportedFaceDetectMode == 2) {
9305 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9306 } else if (supportedFaceDetectMode == 3) {
9307 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9308 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9309 } else {
9310 maxFaces = 0;
9311 }
9312 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9313 availableFaceDetectModes.array(),
9314 availableFaceDetectModes.size());
9315 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9316 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009317 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9318 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9319 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009320
9321 int32_t exposureCompensationRange[] = {
9322 gCamCapability[cameraId]->exposure_compensation_min,
9323 gCamCapability[cameraId]->exposure_compensation_max};
9324 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9325 exposureCompensationRange,
9326 sizeof(exposureCompensationRange)/sizeof(int32_t));
9327
9328 uint8_t lensFacing = (facingBack) ?
9329 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9330 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9331
9332 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9333 available_thumbnail_sizes,
9334 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9335
9336 /* all supported sizes are combined into this tag */
9337 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9338 /*android.scaler.availableStreamConfigurations*/
9339 Vector<int32_t> available_stream_configs;
9340 cam_dimension_t active_array_dim;
9341 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9342 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009343
9344 /* Advertise the list of supported input dimensions based on the property below.
9345 By default, all sizes up to 5MP will be advertised.
9346 Note that the setprop resolution format should be WxH,
9347 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9348 To list all supported sizes, set the property to "0x0". */
9349 cam_dimension_t minInputSize = {2592,1944}; //5MP
9350 memset(prop, 0, sizeof(prop));
9351 property_get("persist.camera.input.minsize", prop, "2592x1944");
9352 if (strlen(prop) > 0) {
9353 char *saveptr = NULL;
9354 char *token = strtok_r(prop, "x", &saveptr);
9355 if (token != NULL) {
9356 minInputSize.width = atoi(token);
9357 }
9358 token = strtok_r(NULL, "x", &saveptr);
9359 if (token != NULL) {
9360 minInputSize.height = atoi(token);
9361 }
9362 }
9363
Thierry Strudel3d639192016-09-09 11:52:26 -07009364 /* Add input/output stream configurations for each of the scalar formats */
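    // RAW formats advertise the raw dimension table; BLOB and YUV/implementation-defined
    // formats advertise the picture size table as output configurations.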
9365 for (size_t j = 0; j < scalar_formats_count; j++) {
9366 switch (scalar_formats[j]) {
9367 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9368 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9369 case HAL_PIXEL_FORMAT_RAW10:
9370 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9371 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9372 addStreamConfig(available_stream_configs, scalar_formats[j],
9373 gCamCapability[cameraId]->raw_dim[i],
9374 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9375 }
9376 break;
9377 case HAL_PIXEL_FORMAT_BLOB:
9378 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9379 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9380 addStreamConfig(available_stream_configs, scalar_formats[j],
9381 gCamCapability[cameraId]->picture_sizes_tbl[i],
9382 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9383 }
9384 break;
9385 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9386 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9387 default:
9388 cam_dimension_t largest_picture_size;
9389 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9390 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9391 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9392 addStreamConfig(available_stream_configs, scalar_formats[j],
9393 gCamCapability[cameraId]->picture_sizes_tbl[i],
9394 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009395 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009396 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9397 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009398 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9399 >= minInputSize.width) || (gCamCapability[cameraId]->
9400 picture_sizes_tbl[i].height >= minInputSize.height)) {
9401 addStreamConfig(available_stream_configs, scalar_formats[j],
9402 gCamCapability[cameraId]->picture_sizes_tbl[i],
9403 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9404 }
9405 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009406 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009407
Thierry Strudel3d639192016-09-09 11:52:26 -07009408 break;
9409 }
9410 }
9411
9412 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9413 available_stream_configs.array(), available_stream_configs.size());
9414 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9415 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9416
9417 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9418 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9419
9420 /* android.scaler.availableMinFrameDurations */
9421 Vector<int64_t> available_min_durations;
9422 for (size_t j = 0; j < scalar_formats_count; j++) {
9423 switch (scalar_formats[j]) {
9424 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9425 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9426 case HAL_PIXEL_FORMAT_RAW10:
9427 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9428 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9429 available_min_durations.add(scalar_formats[j]);
9430 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9431 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9432 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9433 }
9434 break;
9435 default:
9436 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9437 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9438 available_min_durations.add(scalar_formats[j]);
9439 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9440 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9441 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9442 }
9443 break;
9444 }
9445 }
9446 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9447 available_min_durations.array(), available_min_durations.size());
9448
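    // Translate each supported HFR mode into an fps value; modes below MIN_FPS_FOR_BATCH_MODE
    // (and OFF/MAX) are skipped.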
9449 Vector<int32_t> available_hfr_configs;
9450 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9451 int32_t fps = 0;
9452 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9453 case CAM_HFR_MODE_60FPS:
9454 fps = 60;
9455 break;
9456 case CAM_HFR_MODE_90FPS:
9457 fps = 90;
9458 break;
9459 case CAM_HFR_MODE_120FPS:
9460 fps = 120;
9461 break;
9462 case CAM_HFR_MODE_150FPS:
9463 fps = 150;
9464 break;
9465 case CAM_HFR_MODE_180FPS:
9466 fps = 180;
9467 break;
9468 case CAM_HFR_MODE_210FPS:
9469 fps = 210;
9470 break;
9471 case CAM_HFR_MODE_240FPS:
9472 fps = 240;
9473 break;
9474 case CAM_HFR_MODE_480FPS:
9475 fps = 480;
9476 break;
9477 case CAM_HFR_MODE_OFF:
9478 case CAM_HFR_MODE_MAX:
9479 default:
9480 break;
9481 }
9482
9483 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9484 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9485 /* For each HFR frame rate, we need to advertise one variable fps range
9486 * and one fixed fps range per dimension. E.g. for 120 FPS, advertise [30, 120]
9487 * and [120, 120]. While camcorder preview alone is running, the app sets
9488 * [30, 120]. When video recording starts, [120, 120] is set. This way the
9489 * sensor configuration does not change when recording starts. */
9491
9492 /* (width, height, fps_min, fps_max, batch_size_max) */
9493 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9494 j < MAX_SIZES_CNT; j++) {
9495 available_hfr_configs.add(
9496 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9497 available_hfr_configs.add(
9498 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9499 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9500 available_hfr_configs.add(fps);
9501 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9502
9503 /* (width, height, fps_min, fps_max, batch_size_max) */
9504 available_hfr_configs.add(
9505 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9506 available_hfr_configs.add(
9507 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9508 available_hfr_configs.add(fps);
9509 available_hfr_configs.add(fps);
9510 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9511 }
9512 }
9513 }
9514 //Advertise HFR capability only if the property is set
9515 memset(prop, 0, sizeof(prop));
9516 property_get("persist.camera.hal3hfr.enable", prop, "1");
9517 uint8_t hfrEnable = (uint8_t)atoi(prop);
9518
9519 if(hfrEnable && available_hfr_configs.array()) {
9520 staticInfo.update(
9521 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9522 available_hfr_configs.array(), available_hfr_configs.size());
9523 }
9524
9525 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9526 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9527 &max_jpeg_size, 1);
9528
9529 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9530 size_t size = 0;
9531 count = CAM_EFFECT_MODE_MAX;
9532 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9533 for (size_t i = 0; i < count; i++) {
9534 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9535 gCamCapability[cameraId]->supported_effects[i]);
9536 if (NAME_NOT_FOUND != val) {
9537 avail_effects[size] = (uint8_t)val;
9538 size++;
9539 }
9540 }
9541 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9542 avail_effects,
9543 size);
9544
9545 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9546 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9547 size_t supported_scene_modes_cnt = 0;
9548 count = CAM_SCENE_MODE_MAX;
9549 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9550 for (size_t i = 0; i < count; i++) {
9551 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9552 CAM_SCENE_MODE_OFF) {
9553 int val = lookupFwkName(SCENE_MODES_MAP,
9554 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9555 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009556
Thierry Strudel3d639192016-09-09 11:52:26 -07009557 if (NAME_NOT_FOUND != val) {
9558 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9559 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9560 supported_scene_modes_cnt++;
9561 }
9562 }
9563 }
9564 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9565 avail_scene_modes,
9566 supported_scene_modes_cnt);
9567
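    // Each supported scene mode contributes an (AE, AWB, AF) override triple.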
9568 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9569 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9570 supported_scene_modes_cnt,
9571 CAM_SCENE_MODE_MAX,
9572 scene_mode_overrides,
9573 supported_indexes,
9574 cameraId);
9575
9576 if (supported_scene_modes_cnt == 0) {
9577 supported_scene_modes_cnt = 1;
9578 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9579 }
9580
9581 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9582 scene_mode_overrides, supported_scene_modes_cnt * 3);
9583
9584 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9585 ANDROID_CONTROL_MODE_AUTO,
9586 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9587 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9588 available_control_modes,
9589 3);
9590
9591 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9592 size = 0;
9593 count = CAM_ANTIBANDING_MODE_MAX;
9594 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9595 for (size_t i = 0; i < count; i++) {
9596 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9597 gCamCapability[cameraId]->supported_antibandings[i]);
9598 if (NAME_NOT_FOUND != val) {
9599 avail_antibanding_modes[size] = (uint8_t)val;
9600 size++;
9601 }
9602
9603 }
9604 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9605 avail_antibanding_modes,
9606 size);
9607
9608 uint8_t avail_abberation_modes[] = {
9609 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9610 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9611 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9612 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9613 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9614 if (0 == count) {
9615 // If no aberration correction modes are available for a device, advertise only the OFF mode
9616 size = 1;
9617 } else {
9618 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9619 // so advertise all 3 modes if at least one mode is supported, as per the
9620 // new M requirement
9621 size = 3;
9622 }
9623 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9624 avail_abberation_modes,
9625 size);
9626
9627 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9628 size = 0;
9629 count = CAM_FOCUS_MODE_MAX;
9630 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9631 for (size_t i = 0; i < count; i++) {
9632 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9633 gCamCapability[cameraId]->supported_focus_modes[i]);
9634 if (NAME_NOT_FOUND != val) {
9635 avail_af_modes[size] = (uint8_t)val;
9636 size++;
9637 }
9638 }
9639 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9640 avail_af_modes,
9641 size);
9642
9643 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9644 size = 0;
9645 count = CAM_WB_MODE_MAX;
9646 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9647 for (size_t i = 0; i < count; i++) {
9648 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9649 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9650 gCamCapability[cameraId]->supported_white_balances[i]);
9651 if (NAME_NOT_FOUND != val) {
9652 avail_awb_modes[size] = (uint8_t)val;
9653 size++;
9654 }
9655 }
9656 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9657 avail_awb_modes,
9658 size);
9659
9660 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9661 count = CAM_FLASH_FIRING_LEVEL_MAX;
9662 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9663 count);
9664 for (size_t i = 0; i < count; i++) {
9665 available_flash_levels[i] =
9666 gCamCapability[cameraId]->supported_firing_levels[i];
9667 }
9668 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9669 available_flash_levels, count);
9670
9671 uint8_t flashAvailable;
9672 if (gCamCapability[cameraId]->flash_available)
9673 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9674 else
9675 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9676 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9677 &flashAvailable, 1);
9678
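    // Remap the HAL external-flash AE mode to the experimental framework value; the
    // flash-dependent AE modes are appended only when a flash unit is present.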
9679 Vector<uint8_t> avail_ae_modes;
9680 count = CAM_AE_MODE_MAX;
9681 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9682 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009683 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9684 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9685 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9686 }
9687 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009688 }
9689 if (flashAvailable) {
9690 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9691 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9692 }
9693 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9694 avail_ae_modes.array(),
9695 avail_ae_modes.size());
9696
9697 int32_t sensitivity_range[2];
9698 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9699 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9700 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9701 sensitivity_range,
9702 sizeof(sensitivity_range) / sizeof(int32_t));
9703
9704 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9705 &gCamCapability[cameraId]->max_analog_sensitivity,
9706 1);
9707
9708 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9709 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9710 &sensor_orientation,
9711 1);
9712
9713 int32_t max_output_streams[] = {
9714 MAX_STALLING_STREAMS,
9715 MAX_PROCESSED_STREAMS,
9716 MAX_RAW_STREAMS};
9717 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9718 max_output_streams,
9719 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9720
9721 uint8_t avail_leds = 0;
9722 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9723 &avail_leds, 0);
9724
9725 uint8_t focus_dist_calibrated;
9726 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9727 gCamCapability[cameraId]->focus_dist_calibrated);
9728 if (NAME_NOT_FOUND != val) {
9729 focus_dist_calibrated = (uint8_t)val;
9730 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9731 &focus_dist_calibrated, 1);
9732 }
9733
9734 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9735 size = 0;
9736 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9737 MAX_TEST_PATTERN_CNT);
9738 for (size_t i = 0; i < count; i++) {
9739 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9740 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9741 if (NAME_NOT_FOUND != testpatternMode) {
9742 avail_testpattern_modes[size] = testpatternMode;
9743 size++;
9744 }
9745 }
9746 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9747 avail_testpattern_modes,
9748 size);
9749
9750 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9751 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9752 &max_pipeline_depth,
9753 1);
9754
9755 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9756 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9757 &partial_result_count,
9758 1);
9759
9760 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9761 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9762
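    // Build the advertised capability list; RAW is reported only for non-YUV sensors, and
    // constrained high-speed video only when HFR configs are available and enabled.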
9763 Vector<uint8_t> available_capabilities;
9764 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9765 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9766 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9767 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9768 if (supportBurst) {
9769 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9770 }
9771 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9772 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9773 if (hfrEnable && available_hfr_configs.array()) {
9774 available_capabilities.add(
9775 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9776 }
9777
9778 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9779 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9780 }
9781 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9782 available_capabilities.array(),
9783 available_capabilities.size());
9784
9785 // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9786 // The assumption is that all Bayer cameras support MANUAL_SENSOR.
9787 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9788 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9789
9790 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9791 &aeLockAvailable, 1);
9792
9793 // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9794 // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9795 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9796 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9797
9798 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9799 &awbLockAvailable, 1);
9800
9801 int32_t max_input_streams = 1;
9802 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9803 &max_input_streams,
9804 1);
9805
9806 /* The format of the map is: input format, num_output_formats, outputFormat1, .., outputFormatN */
9807 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9808 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9809 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9810 HAL_PIXEL_FORMAT_YCbCr_420_888};
9811 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9812 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9813
9814 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9815 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9816 &max_latency,
9817 1);
9818
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009819#ifndef USE_HAL_3_3
9820 int32_t isp_sensitivity_range[2];
9821 isp_sensitivity_range[0] =
9822 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9823 isp_sensitivity_range[1] =
9824 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9825 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9826 isp_sensitivity_range,
9827 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9828#endif
9829
Thierry Strudel3d639192016-09-09 11:52:26 -07009830 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9831 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9832 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9833 available_hot_pixel_modes,
9834 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9835
9836 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9837 ANDROID_SHADING_MODE_FAST,
9838 ANDROID_SHADING_MODE_HIGH_QUALITY};
9839 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9840 available_shading_modes,
9841 3);
9842
9843 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9844 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9845 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9846 available_lens_shading_map_modes,
9847 2);
9848
9849 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9850 ANDROID_EDGE_MODE_FAST,
9851 ANDROID_EDGE_MODE_HIGH_QUALITY,
9852 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9853 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9854 available_edge_modes,
9855 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9856
9857 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9858 ANDROID_NOISE_REDUCTION_MODE_FAST,
9859 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9860 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9861 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9862 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9863 available_noise_red_modes,
9864 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9865
9866 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9867 ANDROID_TONEMAP_MODE_FAST,
9868 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9869 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9870 available_tonemap_modes,
9871 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9872
9873 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9874 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9875 available_hot_pixel_map_modes,
9876 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9877
9878 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9879 gCamCapability[cameraId]->reference_illuminant1);
9880 if (NAME_NOT_FOUND != val) {
9881 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9882 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9883 }
9884
9885 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9886 gCamCapability[cameraId]->reference_illuminant2);
9887 if (NAME_NOT_FOUND != val) {
9888 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9889 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9890 }
9891
9892 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9893 (void *)gCamCapability[cameraId]->forward_matrix1,
9894 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9895
9896 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9897 (void *)gCamCapability[cameraId]->forward_matrix2,
9898 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9899
9900 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9901 (void *)gCamCapability[cameraId]->color_transform1,
9902 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9903
9904 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9905 (void *)gCamCapability[cameraId]->color_transform2,
9906 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9907
9908 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9909 (void *)gCamCapability[cameraId]->calibration_transform1,
9910 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9911
9912 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9913 (void *)gCamCapability[cameraId]->calibration_transform2,
9914 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9915
9916 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9917 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9918 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9919 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9920 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9921 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9922 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9923 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9924 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9925 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9926 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9927 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9928 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9929 ANDROID_JPEG_GPS_COORDINATES,
9930 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9931 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9932 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9933 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9934 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9935 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9936 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9937 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9938 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9939 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009940#ifndef USE_HAL_3_3
9941 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9942#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009943 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009944 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009945 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9946 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009947 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009948 /* DevCamDebug metadata request_keys_basic */
9949 DEVCAMDEBUG_META_ENABLE,
9950 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009951 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009952 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009953 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009954 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009955 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009956
9957 size_t request_keys_cnt =
9958 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9959 Vector<int32_t> available_request_keys;
9960 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9961 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9962 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9963 }
9964
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009965 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +00009966 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009967 }
9968
Thierry Strudel3d639192016-09-09 11:52:26 -07009969 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9970 available_request_keys.array(), available_request_keys.size());
9971
9972 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9973 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9974 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9975 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9976 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9977 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9978 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9979 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9980 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9981 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9982 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9983 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9984 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9985 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9986 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9987 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9988 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009989 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009990 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9991 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9992 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009993 ANDROID_STATISTICS_FACE_SCORES,
9994#ifndef USE_HAL_3_3
9995 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9996#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009997 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009998 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009999 // DevCamDebug metadata result_keys_basic
10000 DEVCAMDEBUG_META_ENABLE,
10001 // DevCamDebug metadata result_keys AF
10002 DEVCAMDEBUG_AF_LENS_POSITION,
10003 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10004 DEVCAMDEBUG_AF_TOF_DISTANCE,
10005 DEVCAMDEBUG_AF_LUMA,
10006 DEVCAMDEBUG_AF_HAF_STATE,
10007 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10008 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10009 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10010 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10011 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10012 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10013 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10014 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10015 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10016 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10017 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10018 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10019 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10020 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10021 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10022 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10023 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10024 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10025 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10026 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10027 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10028 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10029 // DevCamDebug metadata result_keys AEC
10030 DEVCAMDEBUG_AEC_TARGET_LUMA,
10031 DEVCAMDEBUG_AEC_COMP_LUMA,
10032 DEVCAMDEBUG_AEC_AVG_LUMA,
10033 DEVCAMDEBUG_AEC_CUR_LUMA,
10034 DEVCAMDEBUG_AEC_LINECOUNT,
10035 DEVCAMDEBUG_AEC_REAL_GAIN,
10036 DEVCAMDEBUG_AEC_EXP_INDEX,
10037 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010038 // DevCamDebug metadata result_keys zzHDR
10039 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10040 DEVCAMDEBUG_AEC_L_LINECOUNT,
10041 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10042 DEVCAMDEBUG_AEC_S_LINECOUNT,
10043 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10044 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10045 // DevCamDebug metadata result_keys ADRC
10046 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10047 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10048 DEVCAMDEBUG_AEC_GTM_RATIO,
10049 DEVCAMDEBUG_AEC_LTM_RATIO,
10050 DEVCAMDEBUG_AEC_LA_RATIO,
10051 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010052 // DevCamDebug metadata result_keys AWB
10053 DEVCAMDEBUG_AWB_R_GAIN,
10054 DEVCAMDEBUG_AWB_G_GAIN,
10055 DEVCAMDEBUG_AWB_B_GAIN,
10056 DEVCAMDEBUG_AWB_CCT,
10057 DEVCAMDEBUG_AWB_DECISION,
10058 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010059 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10060 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10061 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010062 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010063 };
10064
Thierry Strudel3d639192016-09-09 11:52:26 -070010065 size_t result_keys_cnt =
10066 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10067
10068 Vector<int32_t> available_result_keys;
10069 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10070 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10071 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10072 }
10073 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10074 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10075 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10076 }
10077 if (supportedFaceDetectMode == 1) {
10078 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10079 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10080 } else if ((supportedFaceDetectMode == 2) ||
10081 (supportedFaceDetectMode == 3)) {
10082 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10083 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10084 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010085#ifndef USE_HAL_3_3
10086 if (hasBlackRegions) {
10087 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10088 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10089 }
10090#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010091
10092 if (gExposeEnableZslKey) {
10093 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10094 }
10095
Thierry Strudel3d639192016-09-09 11:52:26 -070010096 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10097 available_result_keys.array(), available_result_keys.size());
10098
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010099 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010100 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10101 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10102 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10103 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10104 ANDROID_SCALER_CROPPING_TYPE,
10105 ANDROID_SYNC_MAX_LATENCY,
10106 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10107 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10108 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10109 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10110 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10111 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10112 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10113 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10114 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10115 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10116 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10117 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10118 ANDROID_LENS_FACING,
10119 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10120 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10121 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10122 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10123 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10124 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10125 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10126 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10127 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10128 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10129 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10130 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10131 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10132 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10133 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10134 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10135 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10136 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10137 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10138 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010139 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010140 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10141 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10142 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10143 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10144 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10145 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10146 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10147 ANDROID_CONTROL_AVAILABLE_MODES,
10148 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10149 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10150 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10151 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010152 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10153#ifndef USE_HAL_3_3
10154 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10155 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10156#endif
10157 };
10158
10159 Vector<int32_t> available_characteristics_keys;
10160 available_characteristics_keys.appendArray(characteristics_keys_basic,
10161 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10162#ifndef USE_HAL_3_3
10163 if (hasBlackRegions) {
10164 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10165 }
10166#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010167
10168 if (0 <= indexPD) {
10169 int32_t depthKeys[] = {
10170 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10171 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10172 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10173 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10174 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10175 };
10176 available_characteristics_keys.appendArray(depthKeys,
10177 sizeof(depthKeys) / sizeof(depthKeys[0]));
10178 }
10179
Thierry Strudel3d639192016-09-09 11:52:26 -070010180 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010181 available_characteristics_keys.array(),
10182 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010183
10184 /*available stall durations depend on the hw + sw and will be different for different devices */
10185 /*have to add for raw after implementation*/
10186 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10187 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10188
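    // Each stall entry is a (format, width, height, stall duration in ns) tuple.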
10189 Vector<int64_t> available_stall_durations;
10190 for (uint32_t j = 0; j < stall_formats_count; j++) {
10191 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10192 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10193 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10194 available_stall_durations.add(stall_formats[j]);
10195 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10196 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10197 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10198 }
10199 } else {
10200 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10201 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10202 available_stall_durations.add(stall_formats[j]);
10203 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10204 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10205 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10206 }
10207 }
10208 }
10209 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10210 available_stall_durations.array(),
10211 available_stall_durations.size());
10212
10213 //QCAMERA3_OPAQUE_RAW
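    // Pick the opaque RAW packing (QCOM legacy vs. MIPI) and bit depth from the sensor's white level.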
10214 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10215 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10216 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10217 case LEGACY_RAW:
10218 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10219 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10220 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10221 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10222 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10223 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10224 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10225 break;
10226 case MIPI_RAW:
10227 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10228 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10229 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10230 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10231 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10232 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10233 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10234 break;
10235 default:
10236 LOGE("unknown opaque_raw_format %d",
10237 gCamCapability[cameraId]->opaque_raw_fmt);
10238 break;
10239 }
10240 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10241
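    // Advertise (width, height, stride) triplets for each supported RAW dimension.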
10242 Vector<int32_t> strides;
10243 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10244 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10245 cam_stream_buf_plane_info_t buf_planes;
10246 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10247 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10248 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10249 &gCamCapability[cameraId]->padding_info, &buf_planes);
10250 strides.add(buf_planes.plane_info.mp[0].stride);
10251 }
10252 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10253 strides.size());
10254
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010255 //TBD: remove the following line once backend advertises zzHDR in feature mask
10256 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010257 //Video HDR default
10258 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10259 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010260 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010261 int32_t vhdr_mode[] = {
10262 QCAMERA3_VIDEO_HDR_MODE_OFF,
10263 QCAMERA3_VIDEO_HDR_MODE_ON};
10264
10265 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10266 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10267 vhdr_mode, vhdr_mode_count);
10268 }
10269
Thierry Strudel3d639192016-09-09 11:52:26 -070010270 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10271 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10272 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10273
10274 uint8_t isMonoOnly =
10275 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10276 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10277 &isMonoOnly, 1);
10278
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010279#ifndef USE_HAL_3_3
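    // ANDROID_SENSOR_OPAQUE_RAW_SIZE entries are (width, height, frame length in bytes) triplets
    // derived from the calculated plane layout.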
10280 Vector<int32_t> opaque_size;
10281 for (size_t j = 0; j < scalar_formats_count; j++) {
10282 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10283 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10284 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10285 cam_stream_buf_plane_info_t buf_planes;
10286
10287 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10288 &gCamCapability[cameraId]->padding_info, &buf_planes);
10289
10290 if (rc == 0) {
10291 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10292 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10293 opaque_size.add(buf_planes.plane_info.frame_len);
10294 }else {
10295 LOGE("raw frame calculation failed!");
10296 }
10297 }
10298 }
10299 }
10300
10301 if ((opaque_size.size() > 0) &&
10302 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10303 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10304 else
10305 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10306#endif
10307
Thierry Strudel04e026f2016-10-10 11:27:36 -070010308 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10309 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10310 size = 0;
10311 count = CAM_IR_MODE_MAX;
10312 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10313 for (size_t i = 0; i < count; i++) {
10314 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10315 gCamCapability[cameraId]->supported_ir_modes[i]);
10316 if (NAME_NOT_FOUND != val) {
10317 avail_ir_modes[size] = (int32_t)val;
10318 size++;
10319 }
10320 }
10321 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10322 avail_ir_modes, size);
10323 }
10324
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010325 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10326 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10327 size = 0;
10328 count = CAM_AEC_CONVERGENCE_MAX;
10329 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10330 for (size_t i = 0; i < count; i++) {
10331 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10332 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10333 if (NAME_NOT_FOUND != val) {
10334 available_instant_aec_modes[size] = (int32_t)val;
10335 size++;
10336 }
10337 }
10338 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10339 available_instant_aec_modes, size);
10340 }
10341
Thierry Strudel54dc9782017-02-15 12:12:10 -080010342 int32_t sharpness_range[] = {
10343 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10344 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10345 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10346
10347 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10348 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10349 size = 0;
10350 count = CAM_BINNING_CORRECTION_MODE_MAX;
10351 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10352 for (size_t i = 0; i < count; i++) {
10353 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10354 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10355 gCamCapability[cameraId]->supported_binning_modes[i]);
10356 if (NAME_NOT_FOUND != val) {
10357 avail_binning_modes[size] = (int32_t)val;
10358 size++;
10359 }
10360 }
10361 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10362 avail_binning_modes, size);
10363 }
10364
10365 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10366 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10367 size = 0;
10368 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10369 for (size_t i = 0; i < count; i++) {
10370 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10371 gCamCapability[cameraId]->supported_aec_modes[i]);
10372 if (NAME_NOT_FOUND != val)
10373 available_aec_modes[size++] = val;
10374 }
10375 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10376 available_aec_modes, size);
10377 }
10378
10379 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10380 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10381 size = 0;
10382 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10383 for (size_t i = 0; i < count; i++) {
10384 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10385 gCamCapability[cameraId]->supported_iso_modes[i]);
10386 if (NAME_NOT_FOUND != val)
10387 available_iso_modes[size++] = val;
10388 }
10389 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10390 available_iso_modes, size);
10391 }
10392
10393 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010394 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010395 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10396 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10397 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10398
10399 int32_t available_saturation_range[4];
10400 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10401 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10402 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10403 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10404 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10405 available_saturation_range, 4);
10406
10407 uint8_t is_hdr_values[2];
10408 is_hdr_values[0] = 0;
10409 is_hdr_values[1] = 1;
10410 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10411 is_hdr_values, 2);
10412
10413 float is_hdr_confidence_range[2];
10414 is_hdr_confidence_range[0] = 0.0;
10415 is_hdr_confidence_range[1] = 1.0;
10416 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10417 is_hdr_confidence_range, 2);
10418
Emilian Peev0a972ef2017-03-16 10:25:53 +000010419 size_t eepromLength = strnlen(
10420 reinterpret_cast<const char *>(
10421 gCamCapability[cameraId]->eeprom_version_info),
10422 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10423 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010424 char easelInfo[] = ",E:N";
10425 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10426 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10427 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010428 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10429 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010430 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010431 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10432 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10433 }
10434
Thierry Strudel3d639192016-09-09 11:52:26 -070010435 gStaticMetadata[cameraId] = staticInfo.release();
10436 return rc;
10437}
10438
10439/*===========================================================================
10440 * FUNCTION : makeTable
10441 *
10442 * DESCRIPTION: make a table of sizes
10443 *
10444 * PARAMETERS :
10445 *   @dimTable / @size : source dimension table and its entry count
10446 *   @max_size / @sizeTable : copy limit and output array of flattened width/height pairs
10447 *==========================================================================*/
10448void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10449 size_t max_size, int32_t *sizeTable)
10450{
10451 size_t j = 0;
10452 if (size > max_size) {
10453 size = max_size;
10454 }
10455 for (size_t i = 0; i < size; i++) {
10456 sizeTable[j] = dimTable[i].width;
10457 sizeTable[j+1] = dimTable[i].height;
10458 j+=2;
10459 }
10460}
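// Usage sketch (values assumed, not taken from any capability table): makeTable
// flattens {width, height} structs into the flat int32 layout the framework
// expects for available-size lists:
//     cam_dimension_t dims[2] = {{4032, 3024}, {1920, 1080}};
//     int32_t sizes[2 * 2];
//     makeTable(dims, 2, 2, sizes);   // sizes = {4032, 3024, 1920, 1080}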
10461
10462/*===========================================================================
10463 * FUNCTION : makeFPSTable
10464 *
10465 * DESCRIPTION: make a table of fps ranges
10466 *
10467 * PARAMETERS :
10468 *   @fpsTable / @size / @max_size : source fps range table and its limits; @fpsRangesTable : output array of flattened [min, max] pairs
10469 *==========================================================================*/
10470void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10471 size_t max_size, int32_t *fpsRangesTable)
10472{
10473 size_t j = 0;
10474 if (size > max_size) {
10475 size = max_size;
10476 }
10477 for (size_t i = 0; i < size; i++) {
10478 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10479 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10480 j+=2;
10481 }
10482}
10483
10484/*===========================================================================
10485 * FUNCTION : makeOverridesList
10486 *
10487 * DESCRIPTION: make a list of scene mode overrides
10488 *
10489 * PARAMETERS :
10490 *   @overridesTable / @size / @max_size : daemon override table and its limits; @overridesList : output list of {ae, awb, af} triplets
10491 *   @supported_indexes : scene mode indexes supported by the framework; @camera_id : camera Id
10492 *==========================================================================*/
10493void QCamera3HardwareInterface::makeOverridesList(
10494 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10495 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10496{
10497    /* The daemon gives a list of overrides for all scene modes.
10498    However, we should send the framework only the overrides for the
10499    scene modes that it supports. */
10500 size_t j = 0;
10501 if (size > max_size) {
10502 size = max_size;
10503 }
10504 size_t focus_count = CAM_FOCUS_MODE_MAX;
10505 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10506 focus_count);
10507 for (size_t i = 0; i < size; i++) {
10508 bool supt = false;
10509 size_t index = supported_indexes[i];
10510 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10511 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10512 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10513 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10514 overridesTable[index].awb_mode);
10515 if (NAME_NOT_FOUND != val) {
10516 overridesList[j+1] = (uint8_t)val;
10517 }
10518 uint8_t focus_override = overridesTable[index].af_mode;
10519 for (size_t k = 0; k < focus_count; k++) {
10520 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10521 supt = true;
10522 break;
10523 }
10524 }
10525 if (supt) {
10526 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10527 focus_override);
10528 if (NAME_NOT_FOUND != val) {
10529 overridesList[j+2] = (uint8_t)val;
10530 }
10531 } else {
10532 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10533 }
10534 j+=3;
10535 }
10536}
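// Sketch of the output layout (values illustrative, not from a real override
// table): each supported scene mode contributes an {aeMode, awbMode, afMode}
// triplet to the list, e.g. a flash-capable camera whose daemon override maps
// to auto white balance and continuous-picture focus would emit
//     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
//       ANDROID_CONTROL_AWB_MODE_AUTO,
//       ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE }
// for that scene mode.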
10537
10538/*===========================================================================
10539 * FUNCTION : filterJpegSizes
10540 *
10541 * DESCRIPTION: Return the supported JPEG sizes, i.e. the processed sizes that the
10542 *              active array dimensions can be downscaled to within the given factor
10543 *
10544 * PARAMETERS : @jpegSizes / @processedSizes : output and input flattened size arrays;
10545 *   @processedSizesCnt / @maxCount : entry counts; @active_array_size / @downscale_factor : filter limits
10546 * RETURN : length of jpegSizes array
10547 *==========================================================================*/
10548
10549size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10550 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10551 uint8_t downscale_factor)
10552{
10553 if (0 == downscale_factor) {
10554 downscale_factor = 1;
10555 }
10556
10557 int32_t min_width = active_array_size.width / downscale_factor;
10558 int32_t min_height = active_array_size.height / downscale_factor;
10559 size_t jpegSizesCnt = 0;
10560 if (processedSizesCnt > maxCount) {
10561 processedSizesCnt = maxCount;
10562 }
10563 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10564 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10565 jpegSizes[jpegSizesCnt] = processedSizes[i];
10566 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10567 jpegSizesCnt += 2;
10568 }
10569 }
10570 return jpegSizesCnt;
10571}
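// Worked example (numbers assumed, not read from a sensor): with a 4032x3024
// active array and downscale_factor 8, min_width/min_height become 504x378,
// so 640x480 is kept while 320x240 is filtered out:
//     int32_t processed[4] = {640, 480, 320, 240};
//     int32_t jpeg[4];
//     cam_rect_t active = {0, 0, 4032, 3024};   // left, top, width, height
//     size_t cnt = filterJpegSizes(jpeg, processed, 4, 4, active, 8);
//     // cnt == 2, jpeg = {640, 480}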
10572
10573/*===========================================================================
10574 * FUNCTION : computeNoiseModelEntryS
10575 *
10576 * DESCRIPTION: function to map a given sensitivity to the S noise
10577 * model parameters in the DNG noise model.
10578 *
10579 * PARAMETERS : sens : the sensor sensitivity
10580 *
10581 * RETURN : S (sensor amplification) noise
10582 *
10583 *==========================================================================*/
10584double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10585 double s = gCamCapability[mCameraId]->gradient_S * sens +
10586 gCamCapability[mCameraId]->offset_S;
10587 return ((s < 0.0) ? 0.0 : s);
10588}
10589
10590/*===========================================================================
10591 * FUNCTION : computeNoiseModelEntryO
10592 *
10593 * DESCRIPTION: function to map a given sensitivity to the O noise
10594 * model parameters in the DNG noise model.
10595 *
10596 * PARAMETERS : sens : the sensor sensitivity
10597 *
10598 * RETURN : O (sensor readout) noise
10599 *
10600 *==========================================================================*/
10601double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10602 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10603 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10604 1.0 : (1.0 * sens / max_analog_sens);
10605 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10606 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10607 return ((o < 0.0) ? 0.0 : o);
10608}
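// How S and O are consumed (sketch of the standard DNG noise model assumed by
// the framework): for a normalized pixel value x at sensitivity 'sens', the
// modeled noise variance is
//     variance(x) = computeNoiseModelEntryS(sens) * x + computeNoiseModelEntryO(sens)
// and the resulting {S, O} pair is what gets reported per CFA channel in the
// per-frame noise profile (ANDROID_SENSOR_NOISE_PROFILE).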
10609
10610/*===========================================================================
10611 * FUNCTION : getSensorSensitivity
10612 *
10613 * DESCRIPTION: convert iso_mode to an integer value
10614 *
10615 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10616 *
10617 * RETURN : sensitivity supported by sensor
10618 *
10619 *==========================================================================*/
10620int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10621{
10622 int32_t sensitivity;
10623
10624 switch (iso_mode) {
10625 case CAM_ISO_MODE_100:
10626 sensitivity = 100;
10627 break;
10628 case CAM_ISO_MODE_200:
10629 sensitivity = 200;
10630 break;
10631 case CAM_ISO_MODE_400:
10632 sensitivity = 400;
10633 break;
10634 case CAM_ISO_MODE_800:
10635 sensitivity = 800;
10636 break;
10637 case CAM_ISO_MODE_1600:
10638 sensitivity = 1600;
10639 break;
10640 default:
10641 sensitivity = -1;
10642 break;
10643 }
10644 return sensitivity;
10645}
10646
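/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: open the Easel manager client if Easel is present (unless the
 *              do-not-power-on property is set), suspend Easel immediately,
 *              and read the HDR+ related properties; presumably called with
 *              gHdrPlusClientLock held (per the Locked suffix).
 *
 * RETURN     : OK on success
 *              error code otherwise
 *==========================================================================*/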
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010647int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010648 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010649 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10650 // to connect to Easel.
10651 bool doNotpowerOnEasel =
10652 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10653
10654 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010655 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10656 return OK;
10657 }
10658
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010659 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010660 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010661 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010662 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010663 return res;
10664 }
10665
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010666 EaselManagerClientOpened = true;
10667
10668 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010669 if (res != OK) {
10670 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10671 }
10672
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010673 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010674 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010675
10676 // Expose enableZsl key only when HDR+ mode is enabled.
10677 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010678 }
10679
10680 return OK;
10681}
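// The behavior above is driven by system properties; for local experiments one
// could toggle them e.g. via adb (illustrative usage, not part of the HAL):
//     adb shell setprop camera.hdrplus.donotpoweroneasel 1    # keep Easel off so HDR+ tests can connect
//     adb shell setprop persist.camera.hdrplus.enable 1       # enable HDR+ (clears bypass-only mode)
//     adb shell setprop persist.camera.hdrplus.profiling 1    # enable HDR+ profiling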
10682
Thierry Strudel3d639192016-09-09 11:52:26 -070010683/*===========================================================================
10684 * FUNCTION : getCamInfo
10685 *
10686 * DESCRIPTION: query camera capabilities
10687 *
10688 * PARAMETERS :
10689 * @cameraId : camera Id
10690 * @info : camera info struct to be filled in with camera capabilities
10691 *
10692 * RETURN : int type of status
10693 * NO_ERROR -- success
10694 * non-zero failure code
10695 *==========================================================================*/
10696int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10697 struct camera_info *info)
10698{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010699 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010700 int rc = 0;
10701
10702 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010703
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010704 {
10705 Mutex::Autolock l(gHdrPlusClientLock);
10706 rc = initHdrPlusClientLocked();
10707 if (rc != OK) {
10708 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10709 pthread_mutex_unlock(&gCamLock);
10710 return rc;
10711 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010712 }
10713
Thierry Strudel3d639192016-09-09 11:52:26 -070010714 if (NULL == gCamCapability[cameraId]) {
10715 rc = initCapabilities(cameraId);
10716 if (rc < 0) {
10717 pthread_mutex_unlock(&gCamLock);
10718 return rc;
10719 }
10720 }
10721
10722 if (NULL == gStaticMetadata[cameraId]) {
10723 rc = initStaticMetadata(cameraId);
10724 if (rc < 0) {
10725 pthread_mutex_unlock(&gCamLock);
10726 return rc;
10727 }
10728 }
10729
10730 switch(gCamCapability[cameraId]->position) {
10731 case CAM_POSITION_BACK:
10732 case CAM_POSITION_BACK_AUX:
10733 info->facing = CAMERA_FACING_BACK;
10734 break;
10735
10736 case CAM_POSITION_FRONT:
10737 case CAM_POSITION_FRONT_AUX:
10738 info->facing = CAMERA_FACING_FRONT;
10739 break;
10740
10741 default:
10742 LOGE("Unknown position type %d for camera id:%d",
10743 gCamCapability[cameraId]->position, cameraId);
10744 rc = -1;
10745 break;
10746 }
10747
10748
10749 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010750#ifndef USE_HAL_3_3
10751 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10752#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010753 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010754#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010755 info->static_camera_characteristics = gStaticMetadata[cameraId];
10756
10757 //For now assume both cameras can operate independently.
10758 info->conflicting_devices = NULL;
10759 info->conflicting_devices_length = 0;
10760
10761    //resource cost is 100 * MIN(1.0, m/M),
10762    //where m is the throughput requirement with the maximum stream configuration
10763    //and M is the CPP maximum throughput.
10764 float max_fps = 0.0;
10765 for (uint32_t i = 0;
10766 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10767 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10768 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10769 }
10770 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10771 gCamCapability[cameraId]->active_array_size.width *
10772 gCamCapability[cameraId]->active_array_size.height * max_fps /
10773 gCamCapability[cameraId]->max_pixel_bandwidth;
10774 info->resource_cost = 100 * MIN(1.0, ratio);
10775 LOGI("camera %d resource cost is %d", cameraId,
10776 info->resource_cost);
10777
10778 pthread_mutex_unlock(&gCamLock);
10779 return rc;
10780}
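// Worked example of the resource cost computed above (all numbers assumed for
// illustration): with a 4032x3024 active array, a 30 fps maximum range, three
// processed streams and a max_pixel_bandwidth of 1.2e9 pixels/s,
//     ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91
// so resource_cost ~= 91; whenever ratio exceeds 1.0 the cost is capped at 100.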
10781
10782/*===========================================================================
10783 * FUNCTION : translateCapabilityToMetadata
10784 *
10785 * DESCRIPTION: translate the capability into camera_metadata_t
10786 *
10787 * PARAMETERS : type of the request
10788 *
10789 *
10790 * RETURN : success: camera_metadata_t*
10791 * failure: NULL
10792 *
10793 *==========================================================================*/
10794camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10795{
10796 if (mDefaultMetadata[type] != NULL) {
10797 return mDefaultMetadata[type];
10798 }
10799 //first time we are handling this request
10800 //fill up the metadata structure using the wrapper class
10801 CameraMetadata settings;
10802 //translate from cam_capability_t to camera_metadata_tag_t
10803 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10804 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10805 int32_t defaultRequestID = 0;
10806 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10807
10808 /* OIS disable */
10809 char ois_prop[PROPERTY_VALUE_MAX];
10810 memset(ois_prop, 0, sizeof(ois_prop));
10811 property_get("persist.camera.ois.disable", ois_prop, "0");
10812 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10813
10814 /* Force video to use OIS */
10815 char videoOisProp[PROPERTY_VALUE_MAX];
10816 memset(videoOisProp, 0, sizeof(videoOisProp));
10817 property_get("persist.camera.ois.video", videoOisProp, "1");
10818 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010819
10820 // Hybrid AE enable/disable
10821 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10822 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10823 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10824 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10825
Thierry Strudel3d639192016-09-09 11:52:26 -070010826 uint8_t controlIntent = 0;
10827 uint8_t focusMode;
10828 uint8_t vsMode;
10829 uint8_t optStabMode;
10830 uint8_t cacMode;
10831 uint8_t edge_mode;
10832 uint8_t noise_red_mode;
10833 uint8_t tonemap_mode;
10834 bool highQualityModeEntryAvailable = FALSE;
10835 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010836 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010837 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10838 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010839 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010840 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010841 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010842
Thierry Strudel3d639192016-09-09 11:52:26 -070010843 switch (type) {
10844 case CAMERA3_TEMPLATE_PREVIEW:
10845 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10846 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10847 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10848 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10849 edge_mode = ANDROID_EDGE_MODE_FAST;
10850 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10851 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10852 break;
10853 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10854 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10855 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10856 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10857 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10858 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10859 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10860 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10861 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10862 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10863 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10864 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10865 highQualityModeEntryAvailable = TRUE;
10866 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10867 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10868 fastModeEntryAvailable = TRUE;
10869 }
10870 }
10871 if (highQualityModeEntryAvailable) {
10872 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10873 } else if (fastModeEntryAvailable) {
10874 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10875 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010876 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10877 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10878 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010879 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010880 break;
10881 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10882 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10883 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10884 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010885 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10886 edge_mode = ANDROID_EDGE_MODE_FAST;
10887 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10888 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10889 if (forceVideoOis)
10890 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10891 break;
10892 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10893 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10894 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10895 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010896 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10897 edge_mode = ANDROID_EDGE_MODE_FAST;
10898 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10899 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10900 if (forceVideoOis)
10901 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10902 break;
10903 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10904 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10905 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10906 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10907 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10908 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10909 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10910 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10911 break;
10912 case CAMERA3_TEMPLATE_MANUAL:
10913 edge_mode = ANDROID_EDGE_MODE_FAST;
10914 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10915 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10916 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10917 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10918 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10919 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10920 break;
10921 default:
10922 edge_mode = ANDROID_EDGE_MODE_FAST;
10923 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10924 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10925 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10926 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10927 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10928 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10929 break;
10930 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010931    // Set CAC to OFF if the underlying device doesn't support it
10932 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10933 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10934 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010935 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10936 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10937 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10938 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10939 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10940 }
10941 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010942 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010943 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010944
10945 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10946 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10947 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10948 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10949 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10950 || ois_disable)
10951 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10952 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010953 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010954
10955 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10956 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10957
10958 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10959 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10960
10961 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10962 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10963
10964 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10965 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10966
10967 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10968 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10969
10970 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10971 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10972
10973 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10974 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10975
10976 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10977 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10978
10979 /*flash*/
10980 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10981 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10982
10983 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10984 settings.update(ANDROID_FLASH_FIRING_POWER,
10985 &flashFiringLevel, 1);
10986
10987 /* lens */
10988 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10989 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10990
10991 if (gCamCapability[mCameraId]->filter_densities_count) {
10992 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10993 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10994 gCamCapability[mCameraId]->filter_densities_count);
10995 }
10996
10997 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10998 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10999
Thierry Strudel3d639192016-09-09 11:52:26 -070011000 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11001 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11002
11003 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11004 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11005
11006 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11007 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11008
11009 /* face detection (default to OFF) */
11010 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11011 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11012
Thierry Strudel54dc9782017-02-15 12:12:10 -080011013 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11014 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011015
11016 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11017 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11018
11019 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11020 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11021
Thierry Strudel3d639192016-09-09 11:52:26 -070011022
11023 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11024 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11025
11026    /* Exposure time (default to the minimum supported exposure time) */
11027 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11028 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11029
11030 /* frame duration */
11031 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11032 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11033
11034 /* sensitivity */
11035 static const int32_t default_sensitivity = 100;
11036 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011037#ifndef USE_HAL_3_3
11038 static const int32_t default_isp_sensitivity =
11039 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11040 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11041#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011042
11043 /*edge mode*/
11044 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11045
11046 /*noise reduction mode*/
11047 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11048
11049 /*color correction mode*/
11050 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11051 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11052
11053 /*transform matrix mode*/
11054    /*tonemap mode*/
11055
11056 int32_t scaler_crop_region[4];
11057 scaler_crop_region[0] = 0;
11058 scaler_crop_region[1] = 0;
11059 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11060 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11061 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11062
11063 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11064 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11065
11066 /*focus distance*/
11067 float focus_distance = 0.0;
11068 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11069
11070 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011071 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011072 float max_range = 0.0;
11073 float max_fixed_fps = 0.0;
11074 int32_t fps_range[2] = {0, 0};
11075 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11076 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011077 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11078 TEMPLATE_MAX_PREVIEW_FPS) {
11079 continue;
11080 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011081 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11082 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11083 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11084 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11085 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11086 if (range > max_range) {
11087 fps_range[0] =
11088 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11089 fps_range[1] =
11090 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11091 max_range = range;
11092 }
11093 } else {
11094 if (range < 0.01 && max_fixed_fps <
11095 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11096 fps_range[0] =
11097 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11098 fps_range[1] =
11099 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11100 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11101 }
11102 }
11103 }
11104 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11105
11106 /*precapture trigger*/
11107 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11108 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11109
11110 /*af trigger*/
11111 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11112 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11113
11114 /* ae & af regions */
11115 int32_t active_region[] = {
11116 gCamCapability[mCameraId]->active_array_size.left,
11117 gCamCapability[mCameraId]->active_array_size.top,
11118 gCamCapability[mCameraId]->active_array_size.left +
11119 gCamCapability[mCameraId]->active_array_size.width,
11120 gCamCapability[mCameraId]->active_array_size.top +
11121 gCamCapability[mCameraId]->active_array_size.height,
11122 0};
11123 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11124 sizeof(active_region) / sizeof(active_region[0]));
11125 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11126 sizeof(active_region) / sizeof(active_region[0]));
11127
11128 /* black level lock */
11129 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11130 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11131
Thierry Strudel3d639192016-09-09 11:52:26 -070011132 //special defaults for manual template
11133 if (type == CAMERA3_TEMPLATE_MANUAL) {
11134 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11135 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11136
11137 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11138 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11139
11140 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11141 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11142
11143 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11144 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11145
11146 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11147 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11148
11149 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11150 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11151 }
11152
11153
11154 /* TNR
11155     * We use this location to decide for which templates TNR is set.
11156     * TNR is enabled if either the preview or the video stream requires it.
11157     * This is not to be confused with per-stream linking; that decision
11158     * is still made per session and is handled as part of stream configuration.
11159 */
11160 uint8_t tnr_enable = 0;
11161
11162 if (m_bTnrPreview || m_bTnrVideo) {
11163
11164 switch (type) {
11165 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11166 tnr_enable = 1;
11167 break;
11168
11169 default:
11170 tnr_enable = 0;
11171 break;
11172 }
11173
11174 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11175 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11176 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11177
11178 LOGD("TNR:%d with process plate %d for template:%d",
11179 tnr_enable, tnr_process_type, type);
11180 }
11181
11182 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011183 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011184 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11185
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011186 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011187 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11188
Shuzhen Wang920ea402017-05-03 08:49:39 -070011189 uint8_t related_camera_id = mCameraId;
11190 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011191
11192 /* CDS default */
11193 char prop[PROPERTY_VALUE_MAX];
11194 memset(prop, 0, sizeof(prop));
11195 property_get("persist.camera.CDS", prop, "Auto");
11196 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11197 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11198 if (CAM_CDS_MODE_MAX == cds_mode) {
11199 cds_mode = CAM_CDS_MODE_AUTO;
11200 }
11201
11202 /* Disabling CDS in templates which have TNR enabled*/
11203 if (tnr_enable)
11204 cds_mode = CAM_CDS_MODE_OFF;
11205
11206 int32_t mode = cds_mode;
11207 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011208
Thierry Strudel269c81a2016-10-12 12:13:59 -070011209 /* Manual Convergence AEC Speed is disabled by default*/
11210 float default_aec_speed = 0;
11211 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11212
11213 /* Manual Convergence AWB Speed is disabled by default*/
11214 float default_awb_speed = 0;
11215 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11216
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011217 // Set instant AEC to normal convergence by default
11218 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11219 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11220
Shuzhen Wang19463d72016-03-08 11:09:52 -080011221 /* hybrid ae */
11222 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11223
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011224 if (gExposeEnableZslKey) {
11225 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11226 }
11227
Thierry Strudel3d639192016-09-09 11:52:26 -070011228 mDefaultMetadata[type] = settings.release();
11229
11230 return mDefaultMetadata[type];
11231}
11232
11233/*===========================================================================
11234 * FUNCTION : setFrameParameters
11235 *
11236 * DESCRIPTION: set parameters per frame as requested in the metadata from
11237 * framework
11238 *
11239 * PARAMETERS :
11240 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011241 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011242 * @blob_request: Whether this request is a blob request or not
11243 *
11244 * RETURN : success: NO_ERROR
11245 * failure:
11246 *==========================================================================*/
11247int QCamera3HardwareInterface::setFrameParameters(
11248 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011249 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011250 int blob_request,
11251 uint32_t snapshotStreamId)
11252{
11253 /*translate from camera_metadata_t type to parm_type_t*/
11254 int rc = 0;
11255 int32_t hal_version = CAM_HAL_V3;
11256
11257 clear_metadata_buffer(mParameters);
11258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11259 LOGE("Failed to set hal version in the parameters");
11260 return BAD_VALUE;
11261 }
11262
11263 /*we need to update the frame number in the parameters*/
11264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11265 request->frame_number)) {
11266 LOGE("Failed to set the frame number in the parameters");
11267 return BAD_VALUE;
11268 }
11269
11270 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011272 LOGE("Failed to set stream type mask in the parameters");
11273 return BAD_VALUE;
11274 }
11275
11276 if (mUpdateDebugLevel) {
11277 uint32_t dummyDebugLevel = 0;
11278        /* The value of dummyDebugLevel is irrelevant. On
11279         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is read. */
11280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11281 dummyDebugLevel)) {
11282 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11283 return BAD_VALUE;
11284 }
11285 mUpdateDebugLevel = false;
11286 }
11287
11288 if(request->settings != NULL){
11289 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11290 if (blob_request)
11291 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11292 }
11293
11294 return rc;
11295}
11296
11297/*===========================================================================
11298 * FUNCTION : setReprocParameters
11299 *
11300 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11301 * return it.
11302 *
11303 * PARAMETERS :
11304 * @request : request that needs to be serviced
11305 *
11306 * RETURN : success: NO_ERROR
11307 * failure:
11308 *==========================================================================*/
11309int32_t QCamera3HardwareInterface::setReprocParameters(
11310 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11311 uint32_t snapshotStreamId)
11312{
11313 /*translate from camera_metadata_t type to parm_type_t*/
11314 int rc = 0;
11315
11316 if (NULL == request->settings){
11317 LOGE("Reprocess settings cannot be NULL");
11318 return BAD_VALUE;
11319 }
11320
11321 if (NULL == reprocParam) {
11322 LOGE("Invalid reprocessing metadata buffer");
11323 return BAD_VALUE;
11324 }
11325 clear_metadata_buffer(reprocParam);
11326
11327 /*we need to update the frame number in the parameters*/
11328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11329 request->frame_number)) {
11330 LOGE("Failed to set the frame number in the parameters");
11331 return BAD_VALUE;
11332 }
11333
11334 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11335 if (rc < 0) {
11336 LOGE("Failed to translate reproc request");
11337 return rc;
11338 }
11339
11340 CameraMetadata frame_settings;
11341 frame_settings = request->settings;
11342 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11343 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11344 int32_t *crop_count =
11345 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11346 int32_t *crop_data =
11347 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11348 int32_t *roi_map =
11349 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11350 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11351 cam_crop_data_t crop_meta;
11352 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11353 crop_meta.num_of_streams = 1;
11354 crop_meta.crop_info[0].crop.left = crop_data[0];
11355 crop_meta.crop_info[0].crop.top = crop_data[1];
11356 crop_meta.crop_info[0].crop.width = crop_data[2];
11357 crop_meta.crop_info[0].crop.height = crop_data[3];
11358
11359 crop_meta.crop_info[0].roi_map.left =
11360 roi_map[0];
11361 crop_meta.crop_info[0].roi_map.top =
11362 roi_map[1];
11363 crop_meta.crop_info[0].roi_map.width =
11364 roi_map[2];
11365 crop_meta.crop_info[0].roi_map.height =
11366 roi_map[3];
11367
11368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11369 rc = BAD_VALUE;
11370 }
11371 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11372 request->input_buffer->stream,
11373 crop_meta.crop_info[0].crop.left,
11374 crop_meta.crop_info[0].crop.top,
11375 crop_meta.crop_info[0].crop.width,
11376 crop_meta.crop_info[0].crop.height);
11377 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11378 request->input_buffer->stream,
11379 crop_meta.crop_info[0].roi_map.left,
11380 crop_meta.crop_info[0].roi_map.top,
11381 crop_meta.crop_info[0].roi_map.width,
11382 crop_meta.crop_info[0].roi_map.height);
11383 } else {
11384 LOGE("Invalid reprocess crop count %d!", *crop_count);
11385 }
11386 } else {
11387 LOGE("No crop data from matching output stream");
11388 }
11389
11390 /* These settings are not needed for regular requests so handle them specially for
11391 reprocess requests; information needed for EXIF tags */
11392 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11393 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11394 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11395 if (NAME_NOT_FOUND != val) {
11396 uint32_t flashMode = (uint32_t)val;
11397 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11398 rc = BAD_VALUE;
11399 }
11400 } else {
11401 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11402 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11403 }
11404 } else {
11405 LOGH("No flash mode in reprocess settings");
11406 }
11407
11408 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11409 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11410 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11411 rc = BAD_VALUE;
11412 }
11413 } else {
11414 LOGH("No flash state in reprocess settings");
11415 }
11416
11417 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11418 uint8_t *reprocessFlags =
11419 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11421 *reprocessFlags)) {
11422 rc = BAD_VALUE;
11423 }
11424 }
11425
Thierry Strudel54dc9782017-02-15 12:12:10 -080011426 // Add exif debug data to internal metadata
11427 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11428 mm_jpeg_debug_exif_params_t *debug_params =
11429 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11430 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11431 // AE
11432 if (debug_params->ae_debug_params_valid == TRUE) {
11433 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11434 debug_params->ae_debug_params);
11435 }
11436 // AWB
11437 if (debug_params->awb_debug_params_valid == TRUE) {
11438 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11439 debug_params->awb_debug_params);
11440 }
11441 // AF
11442 if (debug_params->af_debug_params_valid == TRUE) {
11443 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11444 debug_params->af_debug_params);
11445 }
11446 // ASD
11447 if (debug_params->asd_debug_params_valid == TRUE) {
11448 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11449 debug_params->asd_debug_params);
11450 }
11451 // Stats
11452 if (debug_params->stats_debug_params_valid == TRUE) {
11453 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11454 debug_params->stats_debug_params);
11455 }
11456 // BE Stats
11457 if (debug_params->bestats_debug_params_valid == TRUE) {
11458 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11459 debug_params->bestats_debug_params);
11460 }
11461 // BHIST
11462 if (debug_params->bhist_debug_params_valid == TRUE) {
11463 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11464 debug_params->bhist_debug_params);
11465 }
11466 // 3A Tuning
11467 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11468 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11469 debug_params->q3a_tuning_debug_params);
11470 }
11471 }
11472
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011473 // Add metadata which reprocess needs
11474 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11475 cam_reprocess_info_t *repro_info =
11476 (cam_reprocess_info_t *)frame_settings.find
11477 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011478 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011479 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011480 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011481 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011482 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011483 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011484 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011485 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011486 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011487 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011488 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011489 repro_info->pipeline_flip);
11490 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11491 repro_info->af_roi);
11492 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11493 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011494 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11495 CAM_INTF_PARM_ROTATION metadata then has been added in
11496 translateToHalMetadata. HAL need to keep this new rotation
11497 metadata. Otherwise, the old rotation info saved in the vendor tag
11498 would be used */
11499 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11500 CAM_INTF_PARM_ROTATION, reprocParam) {
11501 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11502 } else {
11503 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011504 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011505 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011506 }
11507
11508    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11509       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11510       roi.width and roi.height are the final JPEG size.
11511       For now, HAL only checks this for reprocess requests. */
11512 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11513 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11514 uint8_t *enable =
11515 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11516 if (*enable == TRUE) {
11517 int32_t *crop_data =
11518 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11519 cam_stream_crop_info_t crop_meta;
11520 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11521 crop_meta.stream_id = 0;
11522 crop_meta.crop.left = crop_data[0];
11523 crop_meta.crop.top = crop_data[1];
11524 crop_meta.crop.width = crop_data[2];
11525 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011526 // The JPEG crop roi should match cpp output size
11527 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11528 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11529 crop_meta.roi_map.left = 0;
11530 crop_meta.roi_map.top = 0;
11531 crop_meta.roi_map.width = cpp_crop->crop.width;
11532 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011533 }
11534 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11535 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011536 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011537 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011538 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11539 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011540 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011541 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11542
11543 // Add JPEG scale information
11544 cam_dimension_t scale_dim;
11545 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11546 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11547 int32_t *roi =
11548 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11549 scale_dim.width = roi[2];
11550 scale_dim.height = roi[3];
11551 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11552 scale_dim);
11553 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11554 scale_dim.width, scale_dim.height, mCameraId);
11555 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011556 }
11557 }
11558
11559 return rc;
11560}
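// Layout reminder (sketch, values illustrative): QCAMERA3_CROP_REPROCESS and
// QCAMERA3_CROP_ROI_MAP_REPROCESS are consumed above as {left, top, width,
// height} quadruples per stream, e.g. crop_data = {0, 0, 2048, 1536} crops the
// top-left 2048x1536 region, with roi_map giving the reference frame those
// coordinates are expressed in.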
11561
11562/*===========================================================================
11563 * FUNCTION : saveRequestSettings
11564 *
11565 * DESCRIPTION: Add any settings that might have changed to the request settings
11566 * and save the settings to be applied on the frame
11567 *
11568 * PARAMETERS :
11569 * @jpegMetadata : the extracted and/or modified jpeg metadata
11570 * @request : request with initial settings
11571 *
11572 * RETURN :
11573 * camera_metadata_t* : pointer to the saved request settings
11574 *==========================================================================*/
11575camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11576 const CameraMetadata &jpegMetadata,
11577 camera3_capture_request_t *request)
11578{
11579 camera_metadata_t *resultMetadata;
11580 CameraMetadata camMetadata;
11581 camMetadata = request->settings;
11582
11583 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11584 int32_t thumbnail_size[2];
11585 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11586 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11587 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11588 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11589 }
11590
11591 if (request->input_buffer != NULL) {
11592 uint8_t reprocessFlags = 1;
11593 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11594 (uint8_t*)&reprocessFlags,
11595 sizeof(reprocessFlags));
11596 }
11597
11598 resultMetadata = camMetadata.release();
11599 return resultMetadata;
11600}
11601
11602/*===========================================================================
11603 * FUNCTION : setHalFpsRange
11604 *
11605 * DESCRIPTION: set FPS range parameter
11606 *
11607 *
11608 * PARAMETERS :
11609 * @settings : Metadata from framework
11610 * @hal_metadata: Metadata buffer
11611 *
11612 *
11613 * RETURN : success: NO_ERROR
11614 * failure:
11615 *==========================================================================*/
11616int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11617 metadata_buffer_t *hal_metadata)
11618{
11619 int32_t rc = NO_ERROR;
11620 cam_fps_range_t fps_range;
11621 fps_range.min_fps = (float)
11622 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11623 fps_range.max_fps = (float)
11624 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11625 fps_range.video_min_fps = fps_range.min_fps;
11626 fps_range.video_max_fps = fps_range.max_fps;
11627
11628 LOGD("aeTargetFpsRange fps: [%f %f]",
11629 fps_range.min_fps, fps_range.max_fps);
11630 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11631 * follows:
11632 * ---------------------------------------------------------------|
11633 * Video stream is absent in configure_streams |
11634 * (Camcorder preview before the first video record |
11635 * ---------------------------------------------------------------|
11636 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11637 * | | | vid_min/max_fps|
11638 * ---------------------------------------------------------------|
11639 * NO | [ 30, 240] | 240 | [240, 240] |
11640 * |-------------|-------------|----------------|
11641 * | [240, 240] | 240 | [240, 240] |
11642 * ---------------------------------------------------------------|
11643 * Video stream is present in configure_streams |
11644 * ---------------------------------------------------------------|
11645 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11646 * | | | vid_min/max_fps|
11647 * ---------------------------------------------------------------|
11648 * NO | [ 30, 240] | 240 | [240, 240] |
11649 * (camcorder prev |-------------|-------------|----------------|
11650 * after video rec | [240, 240] | 240 | [240, 240] |
11651 * is stopped) | | | |
11652 * ---------------------------------------------------------------|
11653 * YES | [ 30, 240] | 240 | [240, 240] |
11654 * |-------------|-------------|----------------|
11655 * | [240, 240] | 240 | [240, 240] |
11656 * ---------------------------------------------------------------|
11657 * When Video stream is absent in configure_streams,
11658 * preview fps = sensor_fps / batchsize
11659 * Eg: for 240fps at batchSize 4, preview = 60fps
11660 * for 120fps at batchSize 4, preview = 30fps
11661 *
11662 * When video stream is present in configure_streams, preview fps is as per
11663 * the ratio of preview buffers to video buffers requested in process
11664 * capture request
11665 */
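    /* Worked example of the batching arithmetic below (illustrative; assumes
     * PREVIEW_FPS_FOR_HFR corresponds to a 30fps preview target):
     *   aeTargetFpsRange = [30, 240] in constrained HFR mode
     *     -> fps_range and video fps are forced to [240, 240]
     *     -> mBatchSize = 240 / PREVIEW_FPS_FOR_HFR = 8, then capped at
     *        MAX_HFR_BATCH_SIZE
     */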
11666 mBatchSize = 0;
11667 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11668 fps_range.min_fps = fps_range.video_max_fps;
11669 fps_range.video_min_fps = fps_range.video_max_fps;
11670 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11671 fps_range.max_fps);
11672 if (NAME_NOT_FOUND != val) {
11673 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11674 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11675 return BAD_VALUE;
11676 }
11677
11678 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11679 /* If batchmode is currently in progress and the fps changes,
11680 * set the flag to restart the sensor */
11681 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11682 (mHFRVideoFps != fps_range.max_fps)) {
11683 mNeedSensorRestart = true;
11684 }
11685 mHFRVideoFps = fps_range.max_fps;
11686 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11687 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11688 mBatchSize = MAX_HFR_BATCH_SIZE;
11689 }
11690 }
11691 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11692
11693 }
11694 } else {
11695 /* HFR mode is session param in backend/ISP. This should be reset when
11696 * in non-HFR mode */
11697 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11699 return BAD_VALUE;
11700 }
11701 }
11702 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11703 return BAD_VALUE;
11704 }
11705 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11706 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11707 return rc;
11708}
11709
11710/*===========================================================================
11711 * FUNCTION : translateToHalMetadata
11712 *
11713 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11714 *
11715 *
11716 * PARAMETERS :
11717 * @request : request sent from framework
11718 *
11719 *
11720 * RETURN : success: NO_ERROR
11721 * failure: error code (typically BAD_VALUE)
11722 *==========================================================================*/
11723int QCamera3HardwareInterface::translateToHalMetadata
11724 (const camera3_capture_request_t *request,
11725 metadata_buffer_t *hal_metadata,
11726 uint32_t snapshotStreamId) {
11727 if (request == nullptr || hal_metadata == nullptr) {
11728 return BAD_VALUE;
11729 }
11730
11731 int64_t minFrameDuration = getMinFrameDuration(request);
11732
11733 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11734 minFrameDuration);
11735}
11736
11737int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11738 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11739 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11740
11741 int rc = 0;
11742 CameraMetadata frame_settings;
11743 frame_settings = frameworkMetadata;
11744
11745 /* Do not change the order of the following list unless you know what you are
11746 * doing.
11747 * The order is laid out in such a way that parameters in the front of the table
11748 * may be used to override the parameters later in the table. Examples are:
11749 * 1. META_MODE should precede AEC/AWB/AF MODE
11750 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11751 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11752 * 4. Any mode should precede its corresponding settings
11753 */
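    /* For example (illustrative of rule 1 above): ANDROID_CONTROL_MODE is
     * consumed first so that a scene mode selected there can be expanded via
     * extractSceneMode() before the individual AEC/AWB/AF entries further down
     * are translated. */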
11754 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11755 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11757 rc = BAD_VALUE;
11758 }
11759 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11760 if (rc != NO_ERROR) {
11761 LOGE("extractSceneMode failed");
11762 }
11763 }
11764
11765 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11766 uint8_t fwk_aeMode =
11767 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11768 uint8_t aeMode;
11769 int32_t redeye;
11770
11771 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11772 aeMode = CAM_AE_MODE_OFF;
11773 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11774 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
11775 } else {
11776 aeMode = CAM_AE_MODE_ON;
11777 }
11778 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11779 redeye = 1;
11780 } else {
11781 redeye = 0;
11782 }
11783
11784 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11785 fwk_aeMode);
11786 if (NAME_NOT_FOUND != val) {
11787 int32_t flashMode = (int32_t)val;
11788 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11789 }
11790
11791 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11793 rc = BAD_VALUE;
11794 }
11795 }
11796
11797 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11798 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11799 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11800 fwk_whiteLevel);
11801 if (NAME_NOT_FOUND != val) {
11802 uint8_t whiteLevel = (uint8_t)val;
11803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11804 rc = BAD_VALUE;
11805 }
11806 }
11807 }
11808
11809 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11810 uint8_t fwk_cacMode =
11811 frame_settings.find(
11812 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11813 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11814 fwk_cacMode);
11815 if (NAME_NOT_FOUND != val) {
11816 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11817 bool entryAvailable = FALSE;
11818 // Check whether the framework-requested CAC mode is supported by the device
11819 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11820 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11821 entryAvailable = TRUE;
11822 break;
11823 }
11824 }
11825 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11826 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
11827 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11828 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11829 if (entryAvailable == FALSE) {
11830 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11831 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11832 } else {
11833 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11834 // High is not supported, so fall back to FAST: the spec says the underlying
11835 // device implementation may be the same for both modes.
11836 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11837 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11838 // Fast is not supported, so neither HIGH nor FAST can be set; choose OFF
11839 // to avoid the fps drop a high-quality mode would cause
11840 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11841 } else {
11842 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11843 }
11844 }
11845 }
11846 LOGD("Final cacMode is %d", cacMode);
11847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11848 rc = BAD_VALUE;
11849 }
11850 } else {
11851 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11852 }
11853 }
11854
11855 char af_value[PROPERTY_VALUE_MAX];
11856 property_get("persist.camera.af.infinity", af_value, "0");
11857
11858 uint8_t fwk_focusMode = 0;
11859 if (atoi(af_value) == 0) {
11860 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
11861 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
11862 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11863 fwk_focusMode);
11864 if (NAME_NOT_FOUND != val) {
11865 uint8_t focusMode = (uint8_t)val;
11866 LOGD("set focus mode %d", focusMode);
11867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11868 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11869 rc = BAD_VALUE;
11870 }
11871 }
11872 }
11873 } else {
11874 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11875 LOGE("Focus forced to infinity %d", focusMode);
11876 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11877 rc = BAD_VALUE;
11878 }
11879 }
11880
11881 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11882 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
11883 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11885 focalDistance)) {
11886 rc = BAD_VALUE;
11887 }
11888 }
11889
11890 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11891 uint8_t fwk_antibandingMode =
11892 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11893 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11894 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11895 if (NAME_NOT_FOUND != val) {
11896 uint32_t hal_antibandingMode = (uint32_t)val;
11897 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11898 if (m60HzZone) {
11899 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11900 } else {
11901 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11902 }
11903 }
11904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11905 hal_antibandingMode)) {
11906 rc = BAD_VALUE;
11907 }
11908 }
11909 }
11910
11911 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11912 int32_t expCompensation = frame_settings.find(
11913 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11914 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11915 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11916 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11917 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
11918 LOGD("Setting compensation:%d", expCompensation);
11919 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11920 expCompensation)) {
11921 rc = BAD_VALUE;
11922 }
11923 }
11924
11925 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11926 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11927 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11928 rc = BAD_VALUE;
11929 }
11930 }
11931 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11932 rc = setHalFpsRange(frame_settings, hal_metadata);
11933 if (rc != NO_ERROR) {
11934 LOGE("setHalFpsRange failed");
11935 }
11936 }
11937
11938 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11939 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11940 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11941 rc = BAD_VALUE;
11942 }
11943 }
11944
11945 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11946 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11947 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11948 fwk_effectMode);
11949 if (NAME_NOT_FOUND != val) {
11950 uint8_t effectMode = (uint8_t)val;
11951 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11952 rc = BAD_VALUE;
11953 }
11954 }
11955 }
11956
11957 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11958 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11960 colorCorrectMode)) {
11961 rc = BAD_VALUE;
11962 }
11963 }
11964
11965 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11966 cam_color_correct_gains_t colorCorrectGains;
11967 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11968 colorCorrectGains.gains[i] =
11969 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11970 }
11971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11972 colorCorrectGains)) {
11973 rc = BAD_VALUE;
11974 }
11975 }
11976
11977 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11978 cam_color_correct_matrix_t colorCorrectTransform;
11979 cam_rational_type_t transform_elem;
11980 size_t num = 0;
11981 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11982 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11983 transform_elem.numerator =
11984 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11985 transform_elem.denominator =
11986 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11987 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11988 num++;
11989 }
11990 }
11991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11992 colorCorrectTransform)) {
11993 rc = BAD_VALUE;
11994 }
11995 }
11996
11997 cam_trigger_t aecTrigger;
11998 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11999 aecTrigger.trigger_id = -1;
12000 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12001 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12002 aecTrigger.trigger =
12003 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12004 aecTrigger.trigger_id =
12005 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12006 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12007 aecTrigger)) {
12008 rc = BAD_VALUE;
12009 }
12010 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12011 aecTrigger.trigger, aecTrigger.trigger_id);
12012 }
12013
12014 /*af_trigger must come with a trigger id*/
12015 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12016 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12017 cam_trigger_t af_trigger;
12018 af_trigger.trigger =
12019 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12020 af_trigger.trigger_id =
12021 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12022 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12023 rc = BAD_VALUE;
12024 }
12025 LOGD("AfTrigger: %d AfTriggerID: %d",
12026 af_trigger.trigger, af_trigger.trigger_id);
12027 }
12028
12029 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12030 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12031 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12032 rc = BAD_VALUE;
12033 }
12034 }
12035 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12036 cam_edge_application_t edge_application;
12037 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
12038
12039 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12040 edge_application.sharpness = 0;
12041 } else {
12042 edge_application.sharpness =
12043 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12044 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12045 int32_t sharpness =
12046 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12047 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12048 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12049 LOGD("Setting edge mode sharpness %d", sharpness);
12050 edge_application.sharpness = sharpness;
12051 }
12052 }
12053 }
12054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12055 rc = BAD_VALUE;
12056 }
12057 }
12058
12059 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12060 int32_t respectFlashMode = 1;
12061 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12062 uint8_t fwk_aeMode =
12063 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12064 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12065 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12066 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12067 respectFlashMode = 0;
12068 LOGH("AE Mode controls flash, ignore android.flash.mode");
12069 }
12070 }
12071 if (respectFlashMode) {
12072 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12073 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12074 LOGH("flash mode after mapping %d", val);
12075 // To check: CAM_INTF_META_FLASH_MODE usage
12076 if (NAME_NOT_FOUND != val) {
12077 uint8_t flashMode = (uint8_t)val;
12078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12079 rc = BAD_VALUE;
12080 }
12081 }
12082 }
12083 }
12084
12085 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12086 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12088 rc = BAD_VALUE;
12089 }
12090 }
12091
12092 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12093 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12095 flashFiringTime)) {
12096 rc = BAD_VALUE;
12097 }
12098 }
12099
12100 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12101 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12103 hotPixelMode)) {
12104 rc = BAD_VALUE;
12105 }
12106 }
12107
12108 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12109 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12111 lensAperture)) {
12112 rc = BAD_VALUE;
12113 }
12114 }
12115
12116 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12117 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12119 filterDensity)) {
12120 rc = BAD_VALUE;
12121 }
12122 }
12123
12124 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12125 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12127 focalLength)) {
12128 rc = BAD_VALUE;
12129 }
12130 }
12131
12132 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12133 uint8_t optStabMode =
12134 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12136 optStabMode)) {
12137 rc = BAD_VALUE;
12138 }
12139 }
12140
12141 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12142 uint8_t videoStabMode =
12143 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12144 LOGD("videoStabMode from APP = %d", videoStabMode);
12145 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12146 videoStabMode)) {
12147 rc = BAD_VALUE;
12148 }
12149 }
12150
12151
12152 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12153 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12154 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12155 noiseRedMode)) {
12156 rc = BAD_VALUE;
12157 }
12158 }
12159
12160 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12161 float reprocessEffectiveExposureFactor =
12162 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12163 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12164 reprocessEffectiveExposureFactor)) {
12165 rc = BAD_VALUE;
12166 }
12167 }
12168
12169 cam_crop_region_t scalerCropRegion;
12170 bool scalerCropSet = false;
12171 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12172 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12173 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12174 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12175 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12176
12177 // Map coordinate system from active array to sensor output.
12178 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12179 scalerCropRegion.width, scalerCropRegion.height);
12180
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12182 scalerCropRegion)) {
12183 rc = BAD_VALUE;
12184 }
12185 scalerCropSet = true;
12186 }
12187
12188 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12189 int64_t sensorExpTime =
12190 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12191 LOGD("setting sensorExpTime %lld", sensorExpTime);
12192 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12193 sensorExpTime)) {
12194 rc = BAD_VALUE;
12195 }
12196 }
12197
12198 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12199 int64_t sensorFrameDuration =
12200 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
12201 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12202 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12203 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12204 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12205 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12206 sensorFrameDuration)) {
12207 rc = BAD_VALUE;
12208 }
12209 }
12210
12211 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12212 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12213 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12214 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12215 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12216 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12217 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12219 sensorSensitivity)) {
12220 rc = BAD_VALUE;
12221 }
12222 }
12223
12224#ifndef USE_HAL_3_3
12225 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12226 int32_t ispSensitivity =
12227 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12228 if (ispSensitivity <
12229 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12230 ispSensitivity =
12231 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12232 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12233 }
12234 if (ispSensitivity >
12235 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12236 ispSensitivity =
12237 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12238 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12239 }
12240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12241 ispSensitivity)) {
12242 rc = BAD_VALUE;
12243 }
12244 }
12245#endif
12246
12247 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12248 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12250 rc = BAD_VALUE;
12251 }
12252 }
12253
12254 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12255 uint8_t fwk_facedetectMode =
12256 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12257
12258 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12259 fwk_facedetectMode);
12260
12261 if (NAME_NOT_FOUND != val) {
12262 uint8_t facedetectMode = (uint8_t)val;
12263 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12264 facedetectMode)) {
12265 rc = BAD_VALUE;
12266 }
12267 }
12268 }
12269
12270 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
12271 uint8_t histogramMode =
12272 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12274 histogramMode)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12280 uint8_t sharpnessMapMode =
12281 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12283 sharpnessMapMode)) {
12284 rc = BAD_VALUE;
12285 }
12286 }
12287
12288 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12289 uint8_t tonemapMode =
12290 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12291 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12292 rc = BAD_VALUE;
12293 }
12294 }
12295 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12296 /*All tonemap channels will have the same number of points*/
12297 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12298 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12299 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12300 cam_rgb_tonemap_curves tonemapCurves;
12301 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12302 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12303 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12304 tonemapCurves.tonemap_points_cnt,
12305 CAM_MAX_TONEMAP_CURVE_SIZE);
12306 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12307 }
12308
12309 /* ch0 = G*/
12310 size_t point = 0;
12311 cam_tonemap_curve_t tonemapCurveGreen;
12312 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12313 for (size_t j = 0; j < 2; j++) {
12314 tonemapCurveGreen.tonemap_points[i][j] =
12315 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12316 point++;
12317 }
12318 }
12319 tonemapCurves.curves[0] = tonemapCurveGreen;
12320
12321 /* ch 1 = B */
12322 point = 0;
12323 cam_tonemap_curve_t tonemapCurveBlue;
12324 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12325 for (size_t j = 0; j < 2; j++) {
12326 tonemapCurveBlue.tonemap_points[i][j] =
12327 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12328 point++;
12329 }
12330 }
12331 tonemapCurves.curves[1] = tonemapCurveBlue;
12332
12333 /* ch 2 = R */
12334 point = 0;
12335 cam_tonemap_curve_t tonemapCurveRed;
12336 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12337 for (size_t j = 0; j < 2; j++) {
12338 tonemapCurveRed.tonemap_points[i][j] =
12339 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12340 point++;
12341 }
12342 }
12343 tonemapCurves.curves[2] = tonemapCurveRed;
12344
12345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12346 tonemapCurves)) {
12347 rc = BAD_VALUE;
12348 }
12349 }
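    /* Illustrative layout of the tonemap keys consumed above (values are
     * hypothetical): ANDROID_TONEMAP_CURVE_GREEN = {0.0, 0.0, 0.25, 0.45, 1.0, 1.0}
     * has count = 6 floats, i.e. tonemap_points_cnt = 3 (Pin, Pout) pairs:
     * (0.0, 0.0), (0.25, 0.45), (1.0, 1.0). The blue and red curves are parsed
     * the same way into curves[1] and curves[2] respectively. */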
12350
12351 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12352 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12353 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12354 captureIntent)) {
12355 rc = BAD_VALUE;
12356 }
12357 }
12358
12359 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12360 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12361 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12362 blackLevelLock)) {
12363 rc = BAD_VALUE;
12364 }
12365 }
12366
12367 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12368 uint8_t lensShadingMapMode =
12369 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12371 lensShadingMapMode)) {
12372 rc = BAD_VALUE;
12373 }
12374 }
12375
12376 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12377 cam_area_t roi;
12378 bool reset = true;
12379 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
12380
12381 // Map coordinate system from active array to sensor output.
12382 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12383 roi.rect.height);
12384
12385 if (scalerCropSet) {
12386 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12387 }
12388 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12389 rc = BAD_VALUE;
12390 }
12391 }
12392
12393 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12394 cam_area_t roi;
12395 bool reset = true;
12396 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
12397
12398 // Map coordinate system from active array to sensor output.
12399 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12400 roi.rect.height);
12401
12402 if (scalerCropSet) {
12403 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12404 }
12405 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12406 rc = BAD_VALUE;
12407 }
12408 }
12409
12410 // CDS for non-HFR non-video mode
12411 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12412 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12413 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12414 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12415 LOGE("Invalid CDS mode %d!", *fwk_cds);
12416 } else {
12417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12418 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12419 rc = BAD_VALUE;
12420 }
12421 }
12422 }
12423
12424 // Video HDR
12425 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
12426 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
12427 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12428 }
12429 if (m_bVideoHdrEnabled)
12430 vhdr = CAM_VIDEO_HDR_MODE_ON;
12431
12432 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12433
12434 if(vhdr != curr_hdr_state)
12435 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12436
12437 rc = setVideoHdrMode(mParameters, vhdr);
12438 if (rc != NO_ERROR) {
12439 LOGE("setVideoHDR is failed");
12440 }
12441
12442 //IR
12443 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12444 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12445 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
12446 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12447 uint8_t isIRon = 0;
12448
12449 isIRon = (fwk_ir > 0) ? 1 : 0;
12450 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12451 LOGE("Invalid IR mode %d!", fwk_ir);
12452 } else {
12453 if(isIRon != curr_ir_state )
12454 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12455
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12457 CAM_INTF_META_IR_MODE, fwk_ir)) {
12458 rc = BAD_VALUE;
12459 }
12460 }
12461 }
12462
12463 //Binning Correction Mode
12464 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12465 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12466 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12467 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12468 || (0 > fwk_binning_correction)) {
12469 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12470 } else {
12471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12472 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
12476 }
12477
12478 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12479 float aec_speed;
12480 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12481 LOGD("AEC Speed :%f", aec_speed);
12482 if ( aec_speed < 0 ) {
12483 LOGE("Invalid AEC mode %f!", aec_speed);
12484 } else {
12485 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12486 aec_speed)) {
12487 rc = BAD_VALUE;
12488 }
12489 }
12490 }
12491
12492 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12493 float awb_speed;
12494 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12495 LOGD("AWB Speed :%f", awb_speed);
12496 if ( awb_speed < 0 ) {
12497 LOGE("Invalid AWB mode %f!", awb_speed);
12498 } else {
12499 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12500 awb_speed)) {
12501 rc = BAD_VALUE;
12502 }
12503 }
12504 }
12505
12506 // TNR
12507 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12508 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12509 uint8_t b_TnrRequested = 0;
12510 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
12511 cam_denoise_param_t tnr;
12512 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12513 tnr.process_plates =
12514 (cam_denoise_process_type_t)frame_settings.find(
12515 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12516 b_TnrRequested = tnr.denoise_enable;
12517
12518 if(b_TnrRequested != curr_tnr_state)
12519 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12520
12521 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12522 rc = BAD_VALUE;
12523 }
12524 }
12525
12526 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
12527 int32_t* exposure_metering_mode =
12528 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
12529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12530 *exposure_metering_mode)) {
12531 rc = BAD_VALUE;
12532 }
12533 }
12534
12535 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12536 int32_t fwk_testPatternMode =
12537 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12538 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12539 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12540
12541 if (NAME_NOT_FOUND != testPatternMode) {
12542 cam_test_pattern_data_t testPatternData;
12543 memset(&testPatternData, 0, sizeof(testPatternData));
12544 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12545 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12546 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12547 int32_t *fwk_testPatternData =
12548 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12549 testPatternData.r = fwk_testPatternData[0];
12550 testPatternData.b = fwk_testPatternData[3];
12551 switch (gCamCapability[mCameraId]->color_arrangement) {
12552 case CAM_FILTER_ARRANGEMENT_RGGB:
12553 case CAM_FILTER_ARRANGEMENT_GRBG:
12554 testPatternData.gr = fwk_testPatternData[1];
12555 testPatternData.gb = fwk_testPatternData[2];
12556 break;
12557 case CAM_FILTER_ARRANGEMENT_GBRG:
12558 case CAM_FILTER_ARRANGEMENT_BGGR:
12559 testPatternData.gr = fwk_testPatternData[2];
12560 testPatternData.gb = fwk_testPatternData[1];
12561 break;
12562 default:
12563 LOGE("color arrangement %d is not supported",
12564 gCamCapability[mCameraId]->color_arrangement);
12565 break;
12566 }
12567 }
12568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12569 testPatternData)) {
12570 rc = BAD_VALUE;
12571 }
12572 } else {
12573 LOGE("Invalid framework sensor test pattern mode %d",
12574 fwk_testPatternMode);
12575 }
12576 }
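    /* Example of the solid-color mapping above (illustrative): the four
     * framework values are consumed as data[0] -> r and data[3] -> b, while
     * data[1]/data[2] fill the two green channels; whether they land in gr or
     * gb depends on the sensor's color filter arrangement, as handled by the
     * switch above. */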
12577
12578 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12579 size_t count = 0;
12580 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12581 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12582 gps_coords.data.d, gps_coords.count, count);
12583 if (gps_coords.count != count) {
12584 rc = BAD_VALUE;
12585 }
12586 }
12587
12588 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12589 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12590 size_t count = 0;
12591 const char *gps_methods_src = (const char *)
12592 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12593 memset(gps_methods, '\0', sizeof(gps_methods));
12594 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12595 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12596 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12597 if (GPS_PROCESSING_METHOD_SIZE != count) {
12598 rc = BAD_VALUE;
12599 }
12600 }
12601
12602 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12603 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12604 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12605 gps_timestamp)) {
12606 rc = BAD_VALUE;
12607 }
12608 }
12609
12610 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12611 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12612 cam_rotation_info_t rotation_info;
12613 if (orientation == 0) {
12614 rotation_info.rotation = ROTATE_0;
12615 } else if (orientation == 90) {
12616 rotation_info.rotation = ROTATE_90;
12617 } else if (orientation == 180) {
12618 rotation_info.rotation = ROTATE_180;
12619 } else if (orientation == 270) {
12620 rotation_info.rotation = ROTATE_270;
12621 }
12622 rotation_info.device_rotation = ROTATE_0;
12623 rotation_info.streamId = snapshotStreamId;
12624 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12626 rc = BAD_VALUE;
12627 }
12628 }
12629
12630 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12631 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12632 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12633 rc = BAD_VALUE;
12634 }
12635 }
12636
12637 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12638 uint32_t thumb_quality = (uint32_t)
12639 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12640 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12641 thumb_quality)) {
12642 rc = BAD_VALUE;
12643 }
12644 }
12645
12646 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12647 cam_dimension_t dim;
12648 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12649 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12651 rc = BAD_VALUE;
12652 }
12653 }
12654
12655 // Internal metadata
12656 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12657 size_t count = 0;
12658 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12659 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12660 privatedata.data.i32, privatedata.count, count);
12661 if (privatedata.count != count) {
12662 rc = BAD_VALUE;
12663 }
12664 }
12665
12666 // ISO/Exposure Priority
12667 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12668 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12669 cam_priority_mode_t mode =
12670 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12671 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12672 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12673 use_iso_exp_pty.previewOnly = FALSE;
12674 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12675 use_iso_exp_pty.value = *ptr;
12676
12677 if(CAM_ISO_PRIORITY == mode) {
12678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12679 use_iso_exp_pty)) {
12680 rc = BAD_VALUE;
12681 }
12682 }
12683 else {
12684 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12685 use_iso_exp_pty)) {
12686 rc = BAD_VALUE;
12687 }
12688 }
12689
12690 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12691 rc = BAD_VALUE;
12692 }
12693 }
12694 } else {
12695 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12696 rc = BAD_VALUE;
12697 }
12698 }
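    /* Illustrative request using these vendor tags (the numeric value is
     * hypothetical): setting QCAMERA3_SELECT_PRIORITY = CAM_ISO_PRIORITY and
     * QCAMERA3_USE_ISO_EXP_PRIORITY = 800 makes the block above forward the
     * value as CAM_INTF_PARM_ISO and enable CAM_INTF_PARM_ZSL_MODE; when the
     * tags are absent, ZSL mode is reset to 0. */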
12699
12700 // Saturation
12701 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12702 int32_t* use_saturation =
12703 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12705 rc = BAD_VALUE;
12706 }
12707 }
12708
12709 // EV step
12710 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12711 gCamCapability[mCameraId]->exp_compensation_step)) {
12712 rc = BAD_VALUE;
12713 }
12714
12715 // CDS info
12716 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12717 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12718 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12719
12720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12721 CAM_INTF_META_CDS_DATA, *cdsData)) {
12722 rc = BAD_VALUE;
12723 }
12724 }
12725
12726 // Hybrid AE
12727 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12728 uint8_t *hybrid_ae = (uint8_t *)
12729 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12730
12731 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12732 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12733 rc = BAD_VALUE;
12734 }
12735 }
12736
12737 // Histogram
12738 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12739 uint8_t histogramMode =
12740 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12742 histogramMode)) {
12743 rc = BAD_VALUE;
12744 }
12745 }
12746
12747 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12748 int32_t histogramBins =
12749 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12750 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12751 histogramBins)) {
12752 rc = BAD_VALUE;
12753 }
12754 }
12755
12756 // Tracking AF
12757 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12758 uint8_t trackingAfTrigger =
12759 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12760 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12761 trackingAfTrigger)) {
12762 rc = BAD_VALUE;
12763 }
12764 }
12765
12766 return rc;
12767}
12768
12769/*===========================================================================
12770 * FUNCTION : captureResultCb
12771 *
12772 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12773 *
12774 * PARAMETERS :
12775 * @frame : frame information from mm-camera-interface
12776 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12777 * @userdata: userdata
12778 *
12779 * RETURN : NONE
12780 *==========================================================================*/
12781void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12782 camera3_stream_buffer_t *buffer,
12783 uint32_t frame_number, bool isInputBuffer, void *userdata)
12784{
12785 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12786 if (hw == NULL) {
12787 LOGE("Invalid hw %p", hw);
12788 return;
12789 }
12790
12791 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12792 return;
12793}
12794
12795/*===========================================================================
12796 * FUNCTION : setBufferErrorStatus
12797 *
12798 * DESCRIPTION: Callback handler for channels to report any buffer errors
12799 *
12800 * PARAMETERS :
12801 * @ch : Channel on which buffer error is reported from
12802 * @frame_number : frame number on which buffer error is reported on
12803 * @buffer_status : buffer error status
12804 * @userdata: userdata
12805 *
12806 * RETURN : NONE
12807 *==========================================================================*/
12808void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12809 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12810{
12811 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12812 if (hw == NULL) {
12813 LOGE("Invalid hw %p", hw);
12814 return;
12815 }
12816
12817 hw->setBufferErrorStatus(ch, frame_number, err);
12818 return;
12819}
12820
12821void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12822 uint32_t frameNumber, camera3_buffer_status_t err)
12823{
12824 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12825 pthread_mutex_lock(&mMutex);
12826
12827 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12828 if (req.frame_number != frameNumber)
12829 continue;
12830 for (auto& k : req.mPendingBufferList) {
12831 if(k.stream->priv == ch) {
12832 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12833 }
12834 }
12835 }
12836
12837 pthread_mutex_unlock(&mMutex);
12838 return;
12839}
12840/*===========================================================================
12841 * FUNCTION : initialize
12842 *
12843 * DESCRIPTION: Pass framework callback pointers to HAL
12844 *
12845 * PARAMETERS :
12846 *
12847 *
12848 * RETURN : Success : 0
12849 * Failure: -ENODEV
12850 *==========================================================================*/
12851
12852int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12853 const camera3_callback_ops_t *callback_ops)
12854{
12855 LOGD("E");
12856 QCamera3HardwareInterface *hw =
12857 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12858 if (!hw) {
12859 LOGE("NULL camera device");
12860 return -ENODEV;
12861 }
12862
12863 int rc = hw->initialize(callback_ops);
12864 LOGD("X");
12865 return rc;
12866}
12867
12868/*===========================================================================
12869 * FUNCTION : configure_streams
12870 *
12871 * DESCRIPTION: Configure the set of input/output streams requested by the framework
12872 *
12873 * PARAMETERS :
12874 *
12875 *
12876 * RETURN : Success: 0
12877 * Failure: -EINVAL (if stream configuration is invalid)
12878 * -ENODEV (fatal error)
12879 *==========================================================================*/
12880
12881int QCamera3HardwareInterface::configure_streams(
12882 const struct camera3_device *device,
12883 camera3_stream_configuration_t *stream_list)
12884{
12885 LOGD("E");
12886 QCamera3HardwareInterface *hw =
12887 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12888 if (!hw) {
12889 LOGE("NULL camera device");
12890 return -ENODEV;
12891 }
12892 int rc = hw->configureStreams(stream_list);
12893 LOGD("X");
12894 return rc;
12895}
12896
12897/*===========================================================================
12898 * FUNCTION : construct_default_request_settings
12899 *
12900 * DESCRIPTION: Configure a settings buffer to meet the required use case
12901 *
12902 * PARAMETERS :
12903 *
12904 *
12905 * RETURN : Success: Return valid metadata
12906 * Failure: Return NULL
12907 *==========================================================================*/
12908const camera_metadata_t* QCamera3HardwareInterface::
12909 construct_default_request_settings(const struct camera3_device *device,
12910 int type)
12911{
12912
12913 LOGD("E");
12914 camera_metadata_t* fwk_metadata = NULL;
12915 QCamera3HardwareInterface *hw =
12916 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12917 if (!hw) {
12918 LOGE("NULL camera device");
12919 return NULL;
12920 }
12921
12922 fwk_metadata = hw->translateCapabilityToMetadata(type);
12923
12924 LOGD("X");
12925 return fwk_metadata;
12926}
12927
12928/*===========================================================================
12929 * FUNCTION : process_capture_request
12930 *
12931 * DESCRIPTION: Hand a framework capture request to the HAL for processing
12932 *
12933 * PARAMETERS :
12934 *
12935 *
12936 * RETURN :
12937 *==========================================================================*/
12938int QCamera3HardwareInterface::process_capture_request(
12939 const struct camera3_device *device,
12940 camera3_capture_request_t *request)
12941{
12942 LOGD("E");
12943 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
12944 QCamera3HardwareInterface *hw =
12945 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12946 if (!hw) {
12947 LOGE("NULL camera device");
12948 return -EINVAL;
12949 }
12950
12951 int rc = hw->orchestrateRequest(request);
12952 LOGD("X");
12953 return rc;
12954}
12955
12956/*===========================================================================
12957 * FUNCTION : dump
12958 *
12959 * DESCRIPTION: Dump camera device state to the given file descriptor
12960 *
12961 * PARAMETERS :
12962 *
12963 *
12964 * RETURN :
12965 *==========================================================================*/
12966
12967void QCamera3HardwareInterface::dump(
12968 const struct camera3_device *device, int fd)
12969{
12970 /* Log level property is read when "adb shell dumpsys media.camera" is
12971 called so that the log level can be controlled without restarting
12972 the media server */
12973 getLogLevel();
12974
12975 LOGD("E");
12976 QCamera3HardwareInterface *hw =
12977 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12978 if (!hw) {
12979 LOGE("NULL camera device");
12980 return;
12981 }
12982
12983 hw->dump(fd);
12984 LOGD("X");
12985 return;
12986}
12987
12988/*===========================================================================
12989 * FUNCTION : flush
12990 *
12991 * DESCRIPTION: Flush all in-flight captures and return the device to an idle state
12992 *
12993 * PARAMETERS :
12994 *
12995 *
12996 * RETURN :
12997 *==========================================================================*/
12998
12999int QCamera3HardwareInterface::flush(
13000 const struct camera3_device *device)
13001{
13002 int rc;
13003 LOGD("E");
13004 QCamera3HardwareInterface *hw =
13005 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13006 if (!hw) {
13007 LOGE("NULL camera device");
13008 return -EINVAL;
13009 }
13010
13011 pthread_mutex_lock(&hw->mMutex);
13012 // Validate current state
13013 switch (hw->mState) {
13014 case STARTED:
13015 /* valid state */
13016 break;
13017
13018 case ERROR:
13019 pthread_mutex_unlock(&hw->mMutex);
13020 hw->handleCameraDeviceError();
13021 return -ENODEV;
13022
13023 default:
13024 LOGI("Flush returned during state %d", hw->mState);
13025 pthread_mutex_unlock(&hw->mMutex);
13026 return 0;
13027 }
13028 pthread_mutex_unlock(&hw->mMutex);
13029
13030 rc = hw->flush(true /* restart channels */ );
13031 LOGD("X");
13032 return rc;
13033}
13034
13035/*===========================================================================
13036 * FUNCTION : close_camera_device
13037 *
13038 * DESCRIPTION: Close the camera device and free the HAL instance
13039 *
13040 * PARAMETERS :
13041 *
13042 *
13043 * RETURN :
13044 *==========================================================================*/
13045int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13046{
13047 int ret = NO_ERROR;
13048 QCamera3HardwareInterface *hw =
13049 reinterpret_cast<QCamera3HardwareInterface *>(
13050 reinterpret_cast<camera3_device_t *>(device)->priv);
13051 if (!hw) {
13052 LOGE("NULL camera device");
13053 return BAD_VALUE;
13054 }
13055
13056 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13057 delete hw;
13058 LOGI("[KPI Perf]: X");
13059 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
13060 return ret;
13061}
13062
13063/*===========================================================================
13064 * FUNCTION : getWaveletDenoiseProcessPlate
13065 *
13066 * DESCRIPTION: query wavelet denoise process plate
13067 *
13068 * PARAMETERS : None
13069 *
13070 * RETURN : WNR process plate value
13071 *==========================================================================*/
13072cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13073{
13074 char prop[PROPERTY_VALUE_MAX];
13075 memset(prop, 0, sizeof(prop));
13076 property_get("persist.denoise.process.plates", prop, "0");
13077 int processPlate = atoi(prop);
13078 switch(processPlate) {
13079 case 0:
13080 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13081 case 1:
13082 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13083 case 2:
13084 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13085 case 3:
13086 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13087 default:
13088 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13089 }
13090}
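
/*
 * Example (illustrative): the WNR plate selection above is driven entirely by
 * the persist.denoise.process.plates property, e.g.
 *
 *     adb shell setprop persist.denoise.process.plates 1    // CbCr-only WNR
 *
 * Any value outside 0-3 falls back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
 */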
13091
13092
13093/*===========================================================================
13094 * FUNCTION : getTemporalDenoiseProcessPlate
13095 *
13096 * DESCRIPTION: query temporal denoise process plate
13097 *
13098 * PARAMETERS : None
13099 *
 * RETURN : TNR process plate value
13101 *==========================================================================*/
13102cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13103{
13104 char prop[PROPERTY_VALUE_MAX];
13105 memset(prop, 0, sizeof(prop));
13106 property_get("persist.tnr.process.plates", prop, "0");
13107 int processPlate = atoi(prop);
13108 switch(processPlate) {
13109 case 0:
13110 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13111 case 1:
13112 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13113 case 2:
13114 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13115 case 3:
13116 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13117 default:
13118 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13119 }
13120}
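
/*
 * Example (illustrative): the TNR plate uses the same 0-3 mapping as the WNR
 * plate above, but is selected through a separate property:
 *
 *     adb shell setprop persist.tnr.process.plates 2
 */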
13121
13122
13123/*===========================================================================
13124 * FUNCTION : extractSceneMode
13125 *
 * DESCRIPTION: Extract scene mode from framework-set metadata
13127 *
13128 * PARAMETERS :
13129 * @frame_settings: CameraMetadata reference
 * @metaMode: ANDROID_CONTROL_MODE value
13131 * @hal_metadata: hal metadata structure
13132 *
 * RETURN : int32_t status; NO_ERROR on success, BAD_VALUE on failure
13134 *==========================================================================*/
13135int32_t QCamera3HardwareInterface::extractSceneMode(
13136 const CameraMetadata &frame_settings, uint8_t metaMode,
13137 metadata_buffer_t *hal_metadata)
13138{
13139 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013140 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13141
13142 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13143 LOGD("Ignoring control mode OFF_KEEP_STATE");
13144 return NO_ERROR;
13145 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013146
13147 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13148 camera_metadata_ro_entry entry =
13149 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13150 if (0 == entry.count)
13151 return rc;
13152
13153 uint8_t fwk_sceneMode = entry.data.u8[0];
13154
13155 int val = lookupHalName(SCENE_MODES_MAP,
13156 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13157 fwk_sceneMode);
13158 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013159 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013160 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013161 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013162 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013163
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013164 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13165 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13166 }
13167
13168 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13169 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013170 cam_hdr_param_t hdr_params;
13171 hdr_params.hdr_enable = 1;
13172 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13173 hdr_params.hdr_need_1x = false;
13174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13175 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13176 rc = BAD_VALUE;
13177 }
13178 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013179
Thierry Strudel3d639192016-09-09 11:52:26 -070013180 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13181 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13182 rc = BAD_VALUE;
13183 }
13184 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013185
13186 if (mForceHdrSnapshot) {
13187 cam_hdr_param_t hdr_params;
13188 hdr_params.hdr_enable = 1;
13189 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13190 hdr_params.hdr_need_1x = false;
13191 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13192 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13193 rc = BAD_VALUE;
13194 }
13195 }
13196
Thierry Strudel3d639192016-09-09 11:52:26 -070013197 return rc;
13198}
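
/*
 * Example (illustrative, framework side): the scene-mode path above is
 * exercised when the app submits capture settings along the lines of
 *
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *
 * where settings is a CameraMetadata instance; mForceHdrSnapshot applies the
 * same HDR bracketing parameters regardless of the requested scene mode.
 */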
13199
13200/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013201 * FUNCTION : setVideoHdrMode
13202 *
 * DESCRIPTION: Set video HDR mode from framework-set metadata
13204 *
13205 * PARAMETERS :
13206 * @hal_metadata: hal metadata structure
13207 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
13208 *
 * RETURN : NO_ERROR on success, BAD_VALUE for an unsupported video HDR mode
13210 *==========================================================================*/
13211int32_t QCamera3HardwareInterface::setVideoHdrMode(
13212 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13213{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013214 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13215 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13216 }
13217
13218 LOGE("Invalid Video HDR mode %d!", vhdr);
13219 return BAD_VALUE;
13220}
13221
13222/*===========================================================================
13223 * FUNCTION : setSensorHDR
13224 *
13225 * DESCRIPTION: Enable/disable sensor HDR.
13226 *
13227 * PARAMETERS :
13228 * @hal_metadata: hal metadata structure
 * @enable: whether to enable or disable sensor HDR
 * @isVideoHdrEnable: true when invoked for video HDR mode
 *
 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13232 *==========================================================================*/
13233int32_t QCamera3HardwareInterface::setSensorHDR(
13234 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13235{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013236 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013237 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13238
13239 if (enable) {
13240 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13241 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13242 #ifdef _LE_CAMERA_
13243 //Default to staggered HDR for IOT
13244 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13245 #else
13246 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13247 #endif
13248 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13249 }
13250
13251 bool isSupported = false;
13252 switch (sensor_hdr) {
13253 case CAM_SENSOR_HDR_IN_SENSOR:
13254 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13255 CAM_QCOM_FEATURE_SENSOR_HDR) {
13256 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013257 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013258 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013259 break;
13260 case CAM_SENSOR_HDR_ZIGZAG:
13261 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13262 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13263 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013264 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013265 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013266 break;
13267 case CAM_SENSOR_HDR_STAGGERED:
13268 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13269 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13270 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013271 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013272 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013273 break;
13274 case CAM_SENSOR_HDR_OFF:
13275 isSupported = true;
13276 LOGD("Turning off sensor HDR");
13277 break;
13278 default:
13279 LOGE("HDR mode %d not supported", sensor_hdr);
13280 rc = BAD_VALUE;
13281 break;
13282 }
13283
13284 if(isSupported) {
13285 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13286 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13287 rc = BAD_VALUE;
13288 } else {
13289 if(!isVideoHdrEnable)
13290 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013291 }
13292 }
13293 return rc;
13294}
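
/*
 * Example (illustrative): the HDR type comes from persist.camera.sensor.hdr
 * and is cast directly to cam_sensor_hdr_type_t. Assuming the values follow
 * the order OFF=0, IN_SENSOR=1, ZIGZAG=2, STAGGERED=3 (consistent with the
 * "3" default used for staggered HDR on _LE_CAMERA_ builds above):
 *
 *     adb shell setprop persist.camera.sensor.hdr 2    // request zigzag HDR
 */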
13295
13296/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013297 * FUNCTION : needRotationReprocess
13298 *
 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
13300 *
13301 * PARAMETERS : none
13302 *
13303 * RETURN : true: needed
13304 * false: no need
13305 *==========================================================================*/
13306bool QCamera3HardwareInterface::needRotationReprocess()
13307{
13308 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13309 // current rotation is not zero, and pp has the capability to process rotation
13310 LOGH("need do reprocess for rotation");
13311 return true;
13312 }
13313
13314 return false;
13315}
13316
13317/*===========================================================================
13318 * FUNCTION : needReprocess
13319 *
 * DESCRIPTION: check whether reprocess is needed
13321 *
13322 * PARAMETERS : none
13323 *
13324 * RETURN : true: needed
13325 * false: no need
13326 *==========================================================================*/
13327bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13328{
13329 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13330 // TODO: add for ZSL HDR later
13331 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13332 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13333 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13334 return true;
13335 } else {
13336 LOGH("already post processed frame");
13337 return false;
13338 }
13339 }
13340 return needRotationReprocess();
13341}
13342
13343/*===========================================================================
13344 * FUNCTION : needJpegExifRotation
13345 *
 * DESCRIPTION: check whether JPEG EXIF rotation is needed
13347 *
13348 * PARAMETERS : none
13349 *
13350 * RETURN : true: needed
13351 * false: no need
13352 *==========================================================================*/
13353bool QCamera3HardwareInterface::needJpegExifRotation()
13354{
    /* If the pp does not have the ability to do rotation, enable jpeg rotation */
Thierry Strudel3d639192016-09-09 11:52:26 -070013356 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13357 LOGD("Need use Jpeg EXIF Rotation");
13358 return true;
13359 }
13360 return false;
13361}
13362
13363/*===========================================================================
13364 * FUNCTION : addOfflineReprocChannel
13365 *
13366 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13367 * coming from input channel
13368 *
13369 * PARAMETERS :
13370 * @config : reprocess configuration
13371 * @inputChHandle : pointer to the input (source) channel
13372 *
13373 *
13374 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13375 *==========================================================================*/
13376QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13377 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13378{
13379 int32_t rc = NO_ERROR;
13380 QCamera3ReprocessChannel *pChannel = NULL;
13381
13382 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013383 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13384 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013385 if (NULL == pChannel) {
13386 LOGE("no mem for reprocess channel");
13387 return NULL;
13388 }
13389
13390 rc = pChannel->initialize(IS_TYPE_NONE);
13391 if (rc != NO_ERROR) {
13392 LOGE("init reprocess channel failed, ret = %d", rc);
13393 delete pChannel;
13394 return NULL;
13395 }
13396
13397 // pp feature config
13398 cam_pp_feature_config_t pp_config;
13399 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13400
13401 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13402 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13403 & CAM_QCOM_FEATURE_DSDN) {
        // Use CPP CDS in case h/w supports it.
13405 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13406 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13407 }
13408 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13409 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13410 }
13411
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013412 if (config.hdr_param.hdr_enable) {
13413 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13414 pp_config.hdr_param = config.hdr_param;
13415 }
13416
13417 if (mForceHdrSnapshot) {
13418 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13419 pp_config.hdr_param.hdr_enable = 1;
13420 pp_config.hdr_param.hdr_need_1x = 0;
13421 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13422 }
13423
Thierry Strudel3d639192016-09-09 11:52:26 -070013424 rc = pChannel->addReprocStreamsFromSource(pp_config,
13425 config,
13426 IS_TYPE_NONE,
13427 mMetadataChannel);
13428
13429 if (rc != NO_ERROR) {
13430 delete pChannel;
13431 return NULL;
13432 }
13433 return pChannel;
13434}
13435
13436/*===========================================================================
13437 * FUNCTION : getMobicatMask
13438 *
13439 * DESCRIPTION: returns mobicat mask
13440 *
13441 * PARAMETERS : none
13442 *
13443 * RETURN : mobicat mask
13444 *
13445 *==========================================================================*/
13446uint8_t QCamera3HardwareInterface::getMobicatMask()
13447{
13448 return m_MobicatMask;
13449}
13450
13451/*===========================================================================
13452 * FUNCTION : setMobicat
13453 *
13454 * DESCRIPTION: set Mobicat on/off.
13455 *
13456 * PARAMETERS :
13457 * @params : none
13458 *
13459 * RETURN : int32_t type of status
13460 * NO_ERROR -- success
 *              non-zero failure code
13462 *==========================================================================*/
13463int32_t QCamera3HardwareInterface::setMobicat()
13464{
13465 char value [PROPERTY_VALUE_MAX];
13466 property_get("persist.camera.mobicat", value, "0");
13467 int32_t ret = NO_ERROR;
13468 uint8_t enableMobi = (uint8_t)atoi(value);
13469
13470 if (enableMobi) {
13471 tune_cmd_t tune_cmd;
13472 tune_cmd.type = SET_RELOAD_CHROMATIX;
13473 tune_cmd.module = MODULE_ALL;
13474 tune_cmd.value = TRUE;
13475 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13476 CAM_INTF_PARM_SET_VFE_COMMAND,
13477 tune_cmd);
13478
13479 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13480 CAM_INTF_PARM_SET_PP_COMMAND,
13481 tune_cmd);
13482 }
13483 m_MobicatMask = enableMobi;
13484
13485 return ret;
13486}
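
/*
 * Example (illustrative): Mobicat tuning metadata is toggled purely through
 * the property read above:
 *
 *     adb shell setprop persist.camera.mobicat 1
 */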
13487
13488/*===========================================================================
13489* FUNCTION : getLogLevel
13490*
13491* DESCRIPTION: Reads the log level property into a variable
13492*
13493* PARAMETERS :
13494* None
13495*
13496* RETURN :
13497* None
13498*==========================================================================*/
13499void QCamera3HardwareInterface::getLogLevel()
13500{
13501 char prop[PROPERTY_VALUE_MAX];
13502 uint32_t globalLogLevel = 0;
13503
13504 property_get("persist.camera.hal.debug", prop, "0");
13505 int val = atoi(prop);
13506 if (0 <= val) {
13507 gCamHal3LogLevel = (uint32_t)val;
13508 }
13509
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013510 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013511 gKpiDebugLevel = atoi(prop);
13512
13513 property_get("persist.camera.global.debug", prop, "0");
13514 val = atoi(prop);
13515 if (0 <= val) {
13516 globalLogLevel = (uint32_t)val;
13517 }
13518
13519 /* Highest log level among hal.logs and global.logs is selected */
13520 if (gCamHal3LogLevel < globalLogLevel)
13521 gCamHal3LogLevel = globalLogLevel;
13522
13523 return;
13524}
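
/*
 * Examples of the debug properties consumed above (illustrative):
 *
 *     adb shell setprop persist.camera.hal.debug 3       // HAL log level
 *     adb shell setprop persist.camera.global.debug 2    // global log level
 *     adb shell setprop persist.camera.kpi.debug 1       // KPI logging
 *
 * The effective HAL level is the maximum of hal.debug and global.debug, and
 * is re-read on every "adb shell dumpsys media.camera" call via dump().
 */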
13525
13526/*===========================================================================
13527 * FUNCTION : validateStreamRotations
13528 *
13529 * DESCRIPTION: Check if the rotations requested are supported
13530 *
13531 * PARAMETERS :
13532 * @stream_list : streams to be configured
13533 *
13534 * RETURN : NO_ERROR on success
13535 * -EINVAL on failure
13536 *
13537 *==========================================================================*/
13538int QCamera3HardwareInterface::validateStreamRotations(
13539 camera3_stream_configuration_t *streamList)
13540{
13541 int rc = NO_ERROR;
13542
13543 /*
13544 * Loop through all streams requested in configuration
13545 * Check if unsupported rotations have been requested on any of them
13546 */
13547 for (size_t j = 0; j < streamList->num_streams; j++){
13548 camera3_stream_t *newStream = streamList->streams[j];
13549
13550 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13551 bool isImplDef = (newStream->format ==
13552 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13553 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13554 isImplDef);
13555
13556 if (isRotated && (!isImplDef || isZsl)) {
13557 LOGE("Error: Unsupported rotation of %d requested for stream"
13558 "type:%d and stream format:%d",
13559 newStream->rotation, newStream->stream_type,
13560 newStream->format);
13561 rc = -EINVAL;
13562 break;
13563 }
13564 }
13565
13566 return rc;
13567}
13568
13569/*===========================================================================
13570* FUNCTION : getFlashInfo
13571*
13572* DESCRIPTION: Retrieve information about whether the device has a flash.
13573*
13574* PARAMETERS :
13575* @cameraId : Camera id to query
13576* @hasFlash : Boolean indicating whether there is a flash device
13577* associated with given camera
13578* @flashNode : If a flash device exists, this will be its device node.
13579*
13580* RETURN :
13581* None
13582*==========================================================================*/
13583void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13584 bool& hasFlash,
13585 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13586{
13587 cam_capability_t* camCapability = gCamCapability[cameraId];
13588 if (NULL == camCapability) {
13589 hasFlash = false;
13590 flashNode[0] = '\0';
13591 } else {
13592 hasFlash = camCapability->flash_available;
13593 strlcpy(flashNode,
13594 (char*)camCapability->flash_dev_name,
13595 QCAMERA_MAX_FILEPATH_LENGTH);
13596 }
13597}
13598
13599/*===========================================================================
13600* FUNCTION : getEepromVersionInfo
13601*
13602* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13603*
13604* PARAMETERS : None
13605*
13606* RETURN : string describing EEPROM version
13607* "\0" if no such info available
13608*==========================================================================*/
13609const char *QCamera3HardwareInterface::getEepromVersionInfo()
13610{
13611 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13612}
13613
13614/*===========================================================================
13615* FUNCTION : getLdafCalib
13616*
13617* DESCRIPTION: Retrieve Laser AF calibration data
13618*
13619* PARAMETERS : None
13620*
13621* RETURN : Two uint32_t describing laser AF calibration data
13622* NULL if none is available.
13623*==========================================================================*/
13624const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13625{
13626 if (mLdafCalibExist) {
13627 return &mLdafCalib[0];
13628 } else {
13629 return NULL;
13630 }
13631}
13632
13633/*===========================================================================
13634 * FUNCTION : dynamicUpdateMetaStreamInfo
13635 *
13636 * DESCRIPTION: This function:
13637 * (1) stops all the channels
13638 * (2) returns error on pending requests and buffers
13639 * (3) sends metastream_info in setparams
13640 * (4) starts all channels
13641 * This is useful when sensor has to be restarted to apply any
13642 * settings such as frame rate from a different sensor mode
13643 *
13644 * PARAMETERS : None
13645 *
13646 * RETURN : NO_ERROR on success
13647 * Error codes on failure
13648 *
13649 *==========================================================================*/
13650int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13651{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013652 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013653 int rc = NO_ERROR;
13654
13655 LOGD("E");
13656
13657 rc = stopAllChannels();
13658 if (rc < 0) {
13659 LOGE("stopAllChannels failed");
13660 return rc;
13661 }
13662
13663 rc = notifyErrorForPendingRequests();
13664 if (rc < 0) {
13665 LOGE("notifyErrorForPendingRequests failed");
13666 return rc;
13667 }
13668
13669 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13670 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13671 "Format:%d",
13672 mStreamConfigInfo.type[i],
13673 mStreamConfigInfo.stream_sizes[i].width,
13674 mStreamConfigInfo.stream_sizes[i].height,
13675 mStreamConfigInfo.postprocess_mask[i],
13676 mStreamConfigInfo.format[i]);
13677 }
13678
13679 /* Send meta stream info once again so that ISP can start */
13680 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13681 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13682 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13683 mParameters);
13684 if (rc < 0) {
13685 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13686 }
13687
13688 rc = startAllChannels();
13689 if (rc < 0) {
13690 LOGE("startAllChannels failed");
13691 return rc;
13692 }
13693
13694 LOGD("X");
13695 return rc;
13696}
13697
13698/*===========================================================================
13699 * FUNCTION : stopAllChannels
13700 *
13701 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13702 *
13703 * PARAMETERS : None
13704 *
13705 * RETURN : NO_ERROR on success
13706 * Error codes on failure
13707 *
13708 *==========================================================================*/
13709int32_t QCamera3HardwareInterface::stopAllChannels()
13710{
13711 int32_t rc = NO_ERROR;
13712
13713 LOGD("Stopping all channels");
13714 // Stop the Streams/Channels
13715 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13716 it != mStreamInfo.end(); it++) {
13717 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13718 if (channel) {
13719 channel->stop();
13720 }
13721 (*it)->status = INVALID;
13722 }
13723
13724 if (mSupportChannel) {
13725 mSupportChannel->stop();
13726 }
13727 if (mAnalysisChannel) {
13728 mAnalysisChannel->stop();
13729 }
13730 if (mRawDumpChannel) {
13731 mRawDumpChannel->stop();
13732 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013733 if (mHdrPlusRawSrcChannel) {
13734 mHdrPlusRawSrcChannel->stop();
13735 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013736 if (mMetadataChannel) {
        /* If mStreamInfo is not empty, there is a metadata stream */
13738 mMetadataChannel->stop();
13739 }
13740
13741 LOGD("All channels stopped");
13742 return rc;
13743}
13744
13745/*===========================================================================
13746 * FUNCTION : startAllChannels
13747 *
13748 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13749 *
13750 * PARAMETERS : None
13751 *
13752 * RETURN : NO_ERROR on success
13753 * Error codes on failure
13754 *
13755 *==========================================================================*/
13756int32_t QCamera3HardwareInterface::startAllChannels()
13757{
13758 int32_t rc = NO_ERROR;
13759
13760 LOGD("Start all channels ");
13761 // Start the Streams/Channels
13762 if (mMetadataChannel) {
        /* If mStreamInfo is not empty, there is a metadata stream */
13764 rc = mMetadataChannel->start();
13765 if (rc < 0) {
13766 LOGE("META channel start failed");
13767 return rc;
13768 }
13769 }
13770 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13771 it != mStreamInfo.end(); it++) {
13772 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13773 if (channel) {
13774 rc = channel->start();
13775 if (rc < 0) {
13776 LOGE("channel start failed");
13777 return rc;
13778 }
13779 }
13780 }
13781 if (mAnalysisChannel) {
13782 mAnalysisChannel->start();
13783 }
13784 if (mSupportChannel) {
13785 rc = mSupportChannel->start();
13786 if (rc < 0) {
13787 LOGE("Support channel start failed");
13788 return rc;
13789 }
13790 }
13791 if (mRawDumpChannel) {
13792 rc = mRawDumpChannel->start();
13793 if (rc < 0) {
13794 LOGE("RAW dump channel start failed");
13795 return rc;
13796 }
13797 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013798 if (mHdrPlusRawSrcChannel) {
13799 rc = mHdrPlusRawSrcChannel->start();
13800 if (rc < 0) {
13801 LOGE("HDR+ RAW channel start failed");
13802 return rc;
13803 }
13804 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013805
13806 LOGD("All channels started");
13807 return rc;
13808}
13809
13810/*===========================================================================
13811 * FUNCTION : notifyErrorForPendingRequests
13812 *
13813 * DESCRIPTION: This function sends error for all the pending requests/buffers
13814 *
13815 * PARAMETERS : None
13816 *
13817 * RETURN : Error codes
13818 * NO_ERROR on success
13819 *
13820 *==========================================================================*/
13821int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13822{
Emilian Peev7650c122017-01-19 08:24:33 -080013823 notifyErrorFoPendingDepthData(mDepthChannel);
13824
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013825 auto pendingRequest = mPendingRequestsList.begin();
13826 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013827
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013828 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13829 // buffers (for which buffers aren't sent yet).
13830 while (pendingRequest != mPendingRequestsList.end() ||
13831 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13832 if (pendingRequest == mPendingRequestsList.end() ||
13833 pendingBuffer->frame_number < pendingRequest->frame_number) {
13834 // If metadata for this frame was sent, notify about a buffer error and returns buffers
13835 // with error.
13836 for (auto &info : pendingBuffer->mPendingBufferList) {
13837 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013838 camera3_notify_msg_t notify_msg;
13839 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13840 notify_msg.type = CAMERA3_MSG_ERROR;
13841 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013842 notify_msg.message.error.error_stream = info.stream;
13843 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013844 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013845
13846 camera3_stream_buffer_t buffer = {};
13847 buffer.acquire_fence = -1;
13848 buffer.release_fence = -1;
13849 buffer.buffer = info.buffer;
13850 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13851 buffer.stream = info.stream;
13852 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013853 }
13854
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013855 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13856 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13857 pendingBuffer->frame_number > pendingRequest->frame_number) {
13858 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013859 camera3_notify_msg_t notify_msg;
13860 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13861 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013862 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13863 notify_msg.message.error.error_stream = nullptr;
13864 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013865 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013866
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013867 if (pendingRequest->input_buffer != nullptr) {
13868 camera3_capture_result result = {};
13869 result.frame_number = pendingRequest->frame_number;
13870 result.result = nullptr;
13871 result.input_buffer = pendingRequest->input_buffer;
13872 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013873 }
13874
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013875 mShutterDispatcher.clear(pendingRequest->frame_number);
13876 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13877 } else {
13878 // If both buffers and result metadata weren't sent yet, notify about a request error
13879 // and return buffers with error.
13880 for (auto &info : pendingBuffer->mPendingBufferList) {
13881 camera3_notify_msg_t notify_msg;
13882 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13883 notify_msg.type = CAMERA3_MSG_ERROR;
13884 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13885 notify_msg.message.error.error_stream = info.stream;
13886 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13887 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013888
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013889 camera3_stream_buffer_t buffer = {};
13890 buffer.acquire_fence = -1;
13891 buffer.release_fence = -1;
13892 buffer.buffer = info.buffer;
13893 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13894 buffer.stream = info.stream;
13895 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13896 }
13897
13898 if (pendingRequest->input_buffer != nullptr) {
13899 camera3_capture_result result = {};
13900 result.frame_number = pendingRequest->frame_number;
13901 result.result = nullptr;
13902 result.input_buffer = pendingRequest->input_buffer;
13903 orchestrateResult(&result);
13904 }
13905
13906 mShutterDispatcher.clear(pendingRequest->frame_number);
13907 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13908 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070013909 }
13910 }
13911
13912 /* Reset pending frame Drop list and requests list */
13913 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013914 mShutterDispatcher.clear();
13915 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070013916 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013917 LOGH("Cleared all the pending buffers ");
13918
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013919 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013920}
13921
13922bool QCamera3HardwareInterface::isOnEncoder(
13923 const cam_dimension_t max_viewfinder_size,
13924 uint32_t width, uint32_t height)
13925{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013926 return ((width > (uint32_t)max_viewfinder_size.width) ||
13927 (height > (uint32_t)max_viewfinder_size.height) ||
13928 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13929 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013930}
13931
13932/*===========================================================================
13933 * FUNCTION : setBundleInfo
13934 *
13935 * DESCRIPTION: Set bundle info for all streams that are bundle.
13936 *
13937 * PARAMETERS : None
13938 *
13939 * RETURN : NO_ERROR on success
13940 * Error codes on failure
13941 *==========================================================================*/
13942int32_t QCamera3HardwareInterface::setBundleInfo()
13943{
13944 int32_t rc = NO_ERROR;
13945
13946 if (mChannelHandle) {
13947 cam_bundle_config_t bundleInfo;
13948 memset(&bundleInfo, 0, sizeof(bundleInfo));
13949 rc = mCameraHandle->ops->get_bundle_info(
13950 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13951 if (rc != NO_ERROR) {
13952 LOGE("get_bundle_info failed");
13953 return rc;
13954 }
13955 if (mAnalysisChannel) {
13956 mAnalysisChannel->setBundleInfo(bundleInfo);
13957 }
13958 if (mSupportChannel) {
13959 mSupportChannel->setBundleInfo(bundleInfo);
13960 }
13961 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13962 it != mStreamInfo.end(); it++) {
13963 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13964 channel->setBundleInfo(bundleInfo);
13965 }
13966 if (mRawDumpChannel) {
13967 mRawDumpChannel->setBundleInfo(bundleInfo);
13968 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013969 if (mHdrPlusRawSrcChannel) {
13970 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13971 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013972 }
13973
13974 return rc;
13975}
13976
13977/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013978 * FUNCTION : setInstantAEC
13979 *
13980 * DESCRIPTION: Set Instant AEC related params.
13981 *
13982 * PARAMETERS :
13983 * @meta: CameraMetadata reference
13984 *
13985 * RETURN : NO_ERROR on success
13986 * Error codes on failure
13987 *==========================================================================*/
13988int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13989{
13990 int32_t rc = NO_ERROR;
13991 uint8_t val = 0;
13992 char prop[PROPERTY_VALUE_MAX];
13993
13994 // First try to configure instant AEC from framework metadata
13995 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13996 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13997 }
13998
13999 // If framework did not set this value, try to read from set prop.
14000 if (val == 0) {
14001 memset(prop, 0, sizeof(prop));
14002 property_get("persist.camera.instant.aec", prop, "0");
14003 val = (uint8_t)atoi(prop);
14004 }
14005
14006 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14007 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14008 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14009 mInstantAEC = val;
14010 mInstantAECSettledFrameNumber = 0;
14011 mInstantAecFrameIdxCount = 0;
14012 LOGH("instantAEC value set %d",val);
14013 if (mInstantAEC) {
14014 memset(prop, 0, sizeof(prop));
14015 property_get("persist.camera.ae.instant.bound", prop, "10");
14016 int32_t aec_frame_skip_cnt = atoi(prop);
14017 if (aec_frame_skip_cnt >= 0) {
14018 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14019 } else {
14020 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14021 rc = BAD_VALUE;
14022 }
14023 }
14024 } else {
14025 LOGE("Bad instant aec value set %d", val);
14026 rc = BAD_VALUE;
14027 }
14028 return rc;
14029}
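
/*
 * Example (illustrative): instant AEC can also be forced through properties,
 * subject to the same bounds checked above (mode within
 * [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) and a non-negative
 * frame bound):
 *
 *     adb shell setprop persist.camera.instant.aec 1
 *     adb shell setprop persist.camera.ae.instant.bound 12
 */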
14030
14031/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014032 * FUNCTION : get_num_overall_buffers
14033 *
14034 * DESCRIPTION: Estimate number of pending buffers across all requests.
14035 *
14036 * PARAMETERS : None
14037 *
14038 * RETURN : Number of overall pending buffers
14039 *
14040 *==========================================================================*/
14041uint32_t PendingBuffersMap::get_num_overall_buffers()
14042{
14043 uint32_t sum_buffers = 0;
14044 for (auto &req : mPendingBuffersInRequest) {
14045 sum_buffers += req.mPendingBufferList.size();
14046 }
14047 return sum_buffers;
14048}
14049
14050/*===========================================================================
14051 * FUNCTION : removeBuf
14052 *
14053 * DESCRIPTION: Remove a matching buffer from tracker.
14054 *
14055 * PARAMETERS : @buffer: image buffer for the callback
14056 *
14057 * RETURN : None
14058 *
14059 *==========================================================================*/
14060void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14061{
14062 bool buffer_found = false;
14063 for (auto req = mPendingBuffersInRequest.begin();
14064 req != mPendingBuffersInRequest.end(); req++) {
14065 for (auto k = req->mPendingBufferList.begin();
14066 k != req->mPendingBufferList.end(); k++ ) {
14067 if (k->buffer == buffer) {
14068 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14069 req->frame_number, buffer);
14070 k = req->mPendingBufferList.erase(k);
14071 if (req->mPendingBufferList.empty()) {
14072 // Remove this request from Map
14073 req = mPendingBuffersInRequest.erase(req);
14074 }
14075 buffer_found = true;
14076 break;
14077 }
14078 }
14079 if (buffer_found) {
14080 break;
14081 }
14082 }
14083 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14084 get_num_overall_buffers());
14085}
14086
14087/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014088 * FUNCTION : getBufErrStatus
14089 *
14090 * DESCRIPTION: get buffer error status
14091 *
14092 * PARAMETERS : @buffer: buffer handle
14093 *
14094 * RETURN : Error status
14095 *
14096 *==========================================================================*/
14097int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14098{
14099 for (auto& req : mPendingBuffersInRequest) {
14100 for (auto& k : req.mPendingBufferList) {
14101 if (k.buffer == buffer)
14102 return k.bufStatus;
14103 }
14104 }
14105 return CAMERA3_BUFFER_STATUS_OK;
14106}
14107
14108/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014109 * FUNCTION : setPAAFSupport
14110 *
14111 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14112 * feature mask according to stream type and filter
14113 * arrangement
14114 *
14115 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14116 * @stream_type: stream type
14117 * @filter_arrangement: filter arrangement
14118 *
14119 * RETURN : None
14120 *==========================================================================*/
14121void QCamera3HardwareInterface::setPAAFSupport(
14122 cam_feature_mask_t& feature_mask,
14123 cam_stream_type_t stream_type,
14124 cam_color_filter_arrangement_t filter_arrangement)
14125{
Thierry Strudel3d639192016-09-09 11:52:26 -070014126 switch (filter_arrangement) {
14127 case CAM_FILTER_ARRANGEMENT_RGGB:
14128 case CAM_FILTER_ARRANGEMENT_GRBG:
14129 case CAM_FILTER_ARRANGEMENT_GBRG:
14130 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014131 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14132 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014133 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014134 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14135 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014136 }
14137 break;
14138 case CAM_FILTER_ARRANGEMENT_Y:
14139 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14140 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14141 }
14142 break;
14143 default:
14144 break;
14145 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014146 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14147 feature_mask, stream_type, filter_arrangement);
14148
14149
Thierry Strudel3d639192016-09-09 11:52:26 -070014150}
14151
14152/*===========================================================================
14153* FUNCTION : getSensorMountAngle
14154*
14155* DESCRIPTION: Retrieve sensor mount angle
14156*
14157* PARAMETERS : None
14158*
14159* RETURN : sensor mount angle in uint32_t
14160*==========================================================================*/
14161uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14162{
14163 return gCamCapability[mCameraId]->sensor_mount_angle;
14164}
14165
14166/*===========================================================================
14167* FUNCTION : getRelatedCalibrationData
14168*
14169* DESCRIPTION: Retrieve related system calibration data
14170*
14171* PARAMETERS : None
14172*
14173* RETURN : Pointer of related system calibration data
14174*==========================================================================*/
14175const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14176{
14177 return (const cam_related_system_calibration_data_t *)
14178 &(gCamCapability[mCameraId]->related_cam_calibration);
14179}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014180
14181/*===========================================================================
14182 * FUNCTION : is60HzZone
14183 *
 * DESCRIPTION: Whether the device is in a region with 60 Hz mains electricity frequency
14185 *
14186 * PARAMETERS : None
14187 *
14188 * RETURN : True if in 60Hz zone, False otherwise
14189 *==========================================================================*/
14190bool QCamera3HardwareInterface::is60HzZone()
14191{
14192 time_t t = time(NULL);
14193 struct tm lt;
14194
14195 struct tm* r = localtime_r(&t, &lt);
14196
14197 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14198 return true;
14199 else
14200 return false;
14201}
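
/*
 * The check above is a coarse heuristic: UTC offsets strictly between -2h and
 * +8h are treated as 50 Hz regions, and everything else (for example the
 * Americas at UTC-5 or Japan at UTC+9) is assumed to be a 60 Hz zone for
 * anti-banding purposes.
 */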
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014202
14203/*===========================================================================
14204 * FUNCTION : adjustBlackLevelForCFA
14205 *
14206 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14207 * of bayer CFA (Color Filter Array).
14208 *
14209 * PARAMETERS : @input: black level pattern in the order of RGGB
14210 * @output: black level pattern in the order of CFA
14211 * @color_arrangement: CFA color arrangement
14212 *
14213 * RETURN : None
14214 *==========================================================================*/
14215template<typename T>
14216void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14217 T input[BLACK_LEVEL_PATTERN_CNT],
14218 T output[BLACK_LEVEL_PATTERN_CNT],
14219 cam_color_filter_arrangement_t color_arrangement)
14220{
14221 switch (color_arrangement) {
14222 case CAM_FILTER_ARRANGEMENT_GRBG:
14223 output[0] = input[1];
14224 output[1] = input[0];
14225 output[2] = input[3];
14226 output[3] = input[2];
14227 break;
14228 case CAM_FILTER_ARRANGEMENT_GBRG:
14229 output[0] = input[2];
14230 output[1] = input[3];
14231 output[2] = input[0];
14232 output[3] = input[1];
14233 break;
14234 case CAM_FILTER_ARRANGEMENT_BGGR:
14235 output[0] = input[3];
14236 output[1] = input[2];
14237 output[2] = input[1];
14238 output[3] = input[0];
14239 break;
14240 case CAM_FILTER_ARRANGEMENT_RGGB:
14241 output[0] = input[0];
14242 output[1] = input[1];
14243 output[2] = input[2];
14244 output[3] = input[3];
14245 break;
14246 default:
14247 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14248 break;
14249 }
14250}
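
/*
 * Worked example: with an RGGB-ordered input {R, Gr, Gb, B} and a GRBG color
 * arrangement, the mapping above yields {Gr, R, B, Gb}, i.e. output[i] holds
 * the black level of the color actually located at CFA position i.
 */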
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014251
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014252void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14253 CameraMetadata &resultMetadata,
14254 std::shared_ptr<metadata_buffer_t> settings)
14255{
14256 if (settings == nullptr) {
14257 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14258 return;
14259 }
14260
14261 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14262 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14263 }
14264
14265 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14266 String8 str((const char *)gps_methods);
14267 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14268 }
14269
14270 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14271 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14272 }
14273
14274 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14275 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14276 }
14277
14278 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14279 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14280 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14281 }
14282
14283 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14284 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14285 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14286 }
14287
14288 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14289 int32_t fwk_thumb_size[2];
14290 fwk_thumb_size[0] = thumb_size->width;
14291 fwk_thumb_size[1] = thumb_size->height;
14292 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14293 }
14294
14295 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14296 uint8_t fwk_intent = intent[0];
14297 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14298 }
14299}
14300
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014301bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14302 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14303 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014304{
14305 if (hdrPlusRequest == nullptr) return false;
14306
14307 // Check noise reduction mode is high quality.
14308 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14309 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14310 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014311 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14312 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014313 return false;
14314 }
14315
14316 // Check edge mode is high quality.
14317 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14318 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14319 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14320 return false;
14321 }
14322
14323 if (request.num_output_buffers != 1 ||
14324 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14325 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014326 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14327 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
14331 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014332 return false;
14333 }
14334
14335 // Get a YUV buffer from pic channel.
14336 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14337 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14338 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14339 if (res != OK) {
14340 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14341 __FUNCTION__, strerror(-res), res);
14342 return false;
14343 }
14344
14345 pbcamera::StreamBuffer buffer;
14346 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014347 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014348 buffer.data = yuvBuffer->buffer;
14349 buffer.dataSize = yuvBuffer->frame_len;
14350
14351 pbcamera::CaptureRequest pbRequest;
14352 pbRequest.id = request.frame_number;
14353 pbRequest.outputBuffers.push_back(buffer);
14354
14355 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014356 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014357 if (res != OK) {
14358 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14359 strerror(-res), res);
14360 return false;
14361 }
14362
14363 hdrPlusRequest->yuvBuffer = yuvBuffer;
14364 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14365
14366 return true;
14367}
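
/*
 * Sketch (illustrative) of a capture request that passes the HDR+ gating
 * above: high-quality noise reduction and edge modes plus exactly one BLOB
 * (JPEG) output buffer.
 *
 *     uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *     uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *     settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *     settings.update(ANDROID_EDGE_MODE, &edge, 1);
 */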
14368
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014369status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14370{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014371 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14372 return OK;
14373 }
14374
14375 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14376 if (res != OK) {
14377 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14378 strerror(-res), res);
14379 return res;
14380 }
14381 gHdrPlusClientOpening = true;
14382
14383 return OK;
14384}
14385
Chien-Yu Chenee335912017-02-09 17:53:20 -080014386status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14387{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014388 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014389
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014390 // Check if gHdrPlusClient is opened or being opened.
14391 if (gHdrPlusClient == nullptr) {
14392 if (gHdrPlusClientOpening) {
14393 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14394 return OK;
14395 }
14396
14397 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014398 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014399 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14400 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014401 return res;
14402 }
14403
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014404 // When opening HDR+ client completes, HDR+ mode will be enabled.
14405 return OK;
14406
Chien-Yu Chenee335912017-02-09 17:53:20 -080014407 }
14408
14409 // Configure stream for HDR+.
14410 res = configureHdrPlusStreamsLocked();
14411 if (res != OK) {
14412 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014413 return res;
14414 }
14415
14416 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14417 res = gHdrPlusClient->setZslHdrPlusMode(true);
14418 if (res != OK) {
14419 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014420 return res;
14421 }
14422
14423 mHdrPlusModeEnabled = true;
14424 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14425
14426 return OK;
14427}
14428
14429void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14430{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014431 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014432 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014433 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14434 if (res != OK) {
14435 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14436 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014437
14438 // Close HDR+ client so Easel can enter low power mode.
14439 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14440 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014441 }
14442
14443 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014444 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014445 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14446}
14447
14448status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014449{
14450 pbcamera::InputConfiguration inputConfig;
14451 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14452 status_t res = OK;
14453
14454 // Configure HDR+ client streams.
14455 // Get input config.
14456 if (mHdrPlusRawSrcChannel) {
14457 // HDR+ input buffers will be provided by HAL.
14458 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14459 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14460 if (res != OK) {
14461 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14462 __FUNCTION__, strerror(-res), res);
14463 return res;
14464 }
14465
14466 inputConfig.isSensorInput = false;
14467 } else {
14468 // Sensor MIPI will send data to Easel.
14469 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014470 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014471 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14472 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14473 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14474 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14475 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14476 if (mSensorModeInfo.num_raw_bits != 10) {
14477 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14478 mSensorModeInfo.num_raw_bits);
14479 return BAD_VALUE;
14480 }
14481
14482 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014483 }
14484
14485 // Get output configurations.
14486 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014487 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014488
14489 // Easel may need to output YUV output buffers if mPictureChannel was created.
14490 pbcamera::StreamConfiguration yuvOutputConfig;
14491 if (mPictureChannel != nullptr) {
14492 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14493 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14494 if (res != OK) {
14495 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14496 __FUNCTION__, strerror(-res), res);
14497
14498 return res;
14499 }
14500
14501 outputStreamConfigs.push_back(yuvOutputConfig);
14502 }
14503
14504 // TODO: consider other channels for YUV output buffers.
14505
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014506 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014507 if (res != OK) {
14508 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14509 strerror(-res), res);
14510 return res;
14511 }
14512
14513 return OK;
14514}
14515
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014516void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14517{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014518 if (client == nullptr) {
14519 ALOGE("%s: Opened client is null.", __FUNCTION__);
14520 return;
14521 }
14522
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014523 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014524 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14525
14526 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014527 if (!gHdrPlusClientOpening) {
14528 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
14529 return;
14530 }
14531
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014532 gHdrPlusClient = std::move(client);
14533 gHdrPlusClientOpening = false;
14534
14535 // Set static metadata.
14536 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14537 if (res != OK) {
14538 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14539 __FUNCTION__, strerror(-res), res);
14540 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14541 gHdrPlusClient = nullptr;
14542 return;
14543 }
14544
14545 // Enable HDR+ mode.
14546 res = enableHdrPlusModeLocked();
14547 if (res != OK) {
14548 LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
14549 }
14550}
14551
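// Called when opening an HDR+ client fails; logs the error and clears the opening flag.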
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014552void QCamera3HardwareInterface::onOpenFailed(status_t err)
14553{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014554 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14555 Mutex::Autolock l(gHdrPlusClientLock);
14556 gHdrPlusClientOpening = false;
14557}
14558
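// Called when the HDR+ client hits an unrecoverable error; puts the HAL into the ERROR state
// and triggers the camera device error handling path.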
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014559void QCamera3HardwareInterface::onFatalError()
14560{
14561 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14562
14563 // Set HAL state to error.
14564 pthread_mutex_lock(&mMutex);
14565 mState = ERROR;
14566 pthread_mutex_unlock(&mMutex);
14567
14568 handleCameraDeviceError();
14569}
14570
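// Called when the HDR+ client returns a capture result. The single YUV output buffer is handed
// back to the pic channel for JPEG encoding using the updated result metadata, the shutter is
// marked ready with the sensor timestamp, the result metadata is dispatched to the framework,
// and the pending HDR+ request is removed.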
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014571void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014572 const camera_metadata_t &resultMetadata)
14573{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014574 if (result != nullptr) {
14575 if (result->outputBuffers.size() != 1) {
14576 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14577 result->outputBuffers.size());
14578 return;
14579 }
14580
14581 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14582 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14583 result->outputBuffers[0].streamId);
14584 return;
14585 }
14586
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014587 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014588 HdrPlusPendingRequest pendingRequest;
14589 {
14590 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14591 auto req = mHdrPlusPendingRequests.find(result->requestId);
 if (req == mHdrPlusPendingRequests.end()) {
 ALOGE("%s: Couldn't find the pending HDR+ request for request id %d.", __FUNCTION__,
 result->requestId);
 return;
 }
14592 pendingRequest = req->second;
14593 }
14594
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014595 // Update the result metadata with the settings of the HDR+ still capture request because
14596 // the result metadata belongs to a ZSL buffer.
14597 CameraMetadata metadata;
14598 metadata = &resultMetadata;
14599 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14600 camera_metadata_t* updatedResultMetadata = metadata.release();
14601
14602 QCamera3PicChannel *picChannel =
14603 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14604
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014605 // Check if dumping HDR+ YUV output is enabled.
14606 char prop[PROPERTY_VALUE_MAX];
14607 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14608 bool dumpYuvOutput = atoi(prop);
14609
14610 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014611 // Dump yuv buffer to a ppm file.
14612 pbcamera::StreamConfiguration outputConfig;
14613 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14614 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14615 if (rc == OK) {
14616 char buf[FILENAME_MAX] = {};
14617 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14618 result->requestId, result->outputBuffers[0].streamId,
14619 outputConfig.image.width, outputConfig.image.height);
14620
14621 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14622 } else {
14623 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14624 __FUNCTION__, strerror(-rc), rc);
14625 }
14626 }
14627
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014628 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14629 auto halMetadata = std::make_shared<metadata_buffer_t>();
14630 clear_metadata_buffer(halMetadata.get());
14631
14632 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14633 // encoding.
14634 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14635 halStreamId, /*minFrameDuration*/0);
14636 if (res == OK) {
14637 // Return the buffer to pic channel for encoding.
14638 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14639 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14640 halMetadata);
14641 } else {
14642 // Return the buffer without encoding.
14643 // TODO: This should not happen but we may want to report an error buffer to camera
14644 // service.
14645 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14646 ALOGE("%s: Translating framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14647 strerror(-res), res);
14648 }
14649
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014650 // Find the timestamp
14651 camera_metadata_ro_entry_t entry;
14652 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14653 ANDROID_SENSOR_TIMESTAMP, &entry);
14654 if (res != OK) {
14655 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14656 __FUNCTION__, result->requestId, strerror(-res), res);
14657 } else {
14658 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14659 }
14660
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014661 // Send HDR+ metadata to framework.
14662 {
14663 pthread_mutex_lock(&mMutex);
14664
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014665 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14666 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014667 pthread_mutex_unlock(&mMutex);
14668 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014669
14670 // Remove the HDR+ pending request.
14671 {
14672 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14673 auto req = mHdrPlusPendingRequests.find(result->requestId);
14674 mHdrPlusPendingRequests.erase(req);
14675 }
14676 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014677}
14678
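// Called when an HDR+ capture request fails. Returns the YUV buffer to the pic channel, notifies
// the framework of buffer errors for all pending buffers of that frame, and removes the pending
// request.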
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014679void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14680{
14681 if (failedResult == nullptr) {
14682 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14683 return;
14684 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014685
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014686 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014687
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014688 // Remove the pending HDR+ request.
14689 {
14690 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14691 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14692
14693 // Return the buffer to pic channel.
14694 QCamera3PicChannel *picChannel =
14695 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14696 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14697
14698 mHdrPlusPendingRequests.erase(pendingRequest);
14699 }
14700
14701 pthread_mutex_lock(&mMutex);
14702
14703 // Find the pending buffers.
14704 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14705 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14706 if (pendingBuffers->frame_number == failedResult->requestId) {
14707 break;
14708 }
14709 pendingBuffers++;
14710 }
14711
14712 // Send out buffer errors for the pending buffers.
14713 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14714 std::vector<camera3_stream_buffer_t> streamBuffers;
14715 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14716 // Prepare a stream buffer.
14717 camera3_stream_buffer_t streamBuffer = {};
14718 streamBuffer.stream = buffer.stream;
14719 streamBuffer.buffer = buffer.buffer;
14720 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14721 streamBuffer.acquire_fence = -1;
14722 streamBuffer.release_fence = -1;
14723
14724 streamBuffers.push_back(streamBuffer);
14725
14726 // Send out error buffer event.
14727 camera3_notify_msg_t notify_msg = {};
14728 notify_msg.type = CAMERA3_MSG_ERROR;
14729 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14730 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14731 notify_msg.message.error.error_stream = buffer.stream;
14732
14733 orchestrateNotify(&notify_msg);
14734 }
14735
14736 camera3_capture_result_t result = {};
14737 result.frame_number = pendingBuffers->frame_number;
14738 result.num_output_buffers = streamBuffers.size();
14739 result.output_buffers = &streamBuffers[0];
14740
14741 // Send out result with buffer errors.
14742 orchestrateResult(&result);
14743
14744 // Remove pending buffers.
14745 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14746 }
14747
14748 // Remove pending request.
14749 auto halRequest = mPendingRequestsList.begin();
14750 while (halRequest != mPendingRequestsList.end()) {
14751 if (halRequest->frame_number == failedResult->requestId) {
14752 mPendingRequestsList.erase(halRequest);
14753 break;
14754 }
14755 halRequest++;
14756 }
14757
14758 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014759}
14760
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014761
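// ShutterDispatcher delivers shutter notifications to the framework in frame-number order:
// frames are registered with expectShutter(), and a shutter is only sent once every earlier
// expected shutter has been sent.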
14762ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14763 mParent(parent) {}
14764
14765void ShutterDispatcher::expectShutter(uint32_t frameNumber)
14766{
14767 std::lock_guard<std::mutex> lock(mLock);
14768 mShutters.emplace(frameNumber, Shutter());
14769}
14770
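// Record the timestamp for this frame's shutter, then send out every consecutive ready shutter
// starting from the oldest pending one, stopping at the first shutter that is not ready yet.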
14771void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14772{
14773 std::lock_guard<std::mutex> lock(mLock);
14774
14775 // Make this frame's shutter ready.
14776 auto shutter = mShutters.find(frameNumber);
14777 if (shutter == mShutters.end()) {
14778 // Shutter was already sent.
14779 return;
14780 }
14781
14782 shutter->second.ready = true;
14783 shutter->second.timestamp = timestamp;
14784
14785 // Iterate through the shutters and send them out until we reach one that's not ready yet.
14786 shutter = mShutters.begin();
14787 while (shutter != mShutters.end()) {
14788 if (!shutter->second.ready) {
14789 // If this shutter is not ready, the following shutters can't be sent.
14790 break;
14791 }
14792
14793 camera3_notify_msg_t msg = {};
14794 msg.type = CAMERA3_MSG_SHUTTER;
14795 msg.message.shutter.frame_number = shutter->first;
14796 msg.message.shutter.timestamp = shutter->second.timestamp;
14797 mParent->orchestrateNotify(&msg);
14798
14799 shutter = mShutters.erase(shutter);
14800 }
14801}
14802
14803void ShutterDispatcher::clear(uint32_t frameNumber)
14804{
14805 std::lock_guard<std::mutex> lock(mLock);
14806 mShutters.erase(frameNumber);
14807}
14808
14809void ShutterDispatcher::clear()
14810{
14811 std::lock_guard<std::mutex> lock(mLock);
14812
14813 // Log errors for stale shutters.
14814 for (auto &shutter : mShutters) {
14815 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
14816 __FUNCTION__, shutter.first, shutter.second.ready,
14817 shutter.second.timestamp);
14818 }
14819 mShutters.clear();
14820}
14821
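// OutputBufferDispatcher keeps a per-stream map of pending output buffers so that buffers of a
// stream are returned to the framework in frame-number order: a ready buffer is only sent once
// all earlier expected buffers of the same stream have been sent.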
14822OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14823 mParent(parent) {}
14824
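// Reset the per-stream pending-buffer maps for a new stream configuration.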
14825status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14826{
14827 std::lock_guard<std::mutex> lock(mLock);
14828 mStreamBuffers.clear();
14829 if (!streamList) {
14830 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
14831 return -EINVAL;
14832 }
14833
14834 // Create a "frame-number -> buffer" map for each stream.
14835 for (uint32_t i = 0; i < streamList->num_streams; i++) {
14836 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
14837 }
14838
14839 return OK;
14840}
14841
14842status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
14843{
14844 std::lock_guard<std::mutex> lock(mLock);
14845
14846 // Find the "frame-number -> buffer" map for the stream.
14847 auto buffers = mStreamBuffers.find(stream);
14848 if (buffers == mStreamBuffers.end()) {
14849 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
14850 return -EINVAL;
14851 }
14852
14853 // Create an unready buffer for this frame number.
14854 buffers->second.emplace(frameNumber, Buffer());
14855 return OK;
14856}
14857
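// Mark the buffer for this frame number as ready, then return every consecutive ready buffer of
// the same stream to the framework, stopping at the first buffer that is not ready yet.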
14858void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
14859 const camera3_stream_buffer_t &buffer)
14860{
14861 std::lock_guard<std::mutex> lock(mLock);
14862
14863 // Find the frame number -> buffer map for the stream.
14864 auto buffers = mStreamBuffers.find(buffer.stream);
14865 if (buffers == mStreamBuffers.end()) {
14866 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
14867 return;
14868 }
14869
14870 // Find the unready buffer for this frame number and mark it ready.
14871 auto pendingBuffer = buffers->second.find(frameNumber);
14872 if (pendingBuffer == buffers->second.end()) {
14873 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
14874 return;
14875 }
14876
14877 pendingBuffer->second.ready = true;
14878 pendingBuffer->second.buffer = buffer;
14879
14880 // Iterate through the buffers and send them out until we reach one that's not ready yet.
14881 pendingBuffer = buffers->second.begin();
14882 while (pendingBuffer != buffers->second.end()) {
14883 if (!pendingBuffer->second.ready) {
14884 // If this buffer is not ready, the following buffers can't be sent.
14885 break;
14886 }
14887
14888 camera3_capture_result_t result = {};
14889 result.frame_number = pendingBuffer->first;
14890 result.num_output_buffers = 1;
14891 result.output_buffers = &pendingBuffer->second.buffer;
14892
14893 // Send out the result containing the ready buffer.
14894 mParent->orchestrateResult(&result);
14895
14896 pendingBuffer = buffers->second.erase(pendingBuffer);
14897 }
14898}
14899
14900void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
14901{
14902 std::lock_guard<std::mutex> lock(mLock);
14903
14904 // Log errors for stale buffers.
14905 for (auto &buffers : mStreamBuffers) {
14906 for (auto &buffer : buffers.second) {
14907 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
14908 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
14909 }
14910 buffers.second.clear();
14911 }
14912
14913 if (clearConfiguredStreams) {
14914 mStreamBuffers.clear();
14915 }
14916}
14917
Thierry Strudel3d639192016-09-09 11:52:26 -070014918}; //end namespace qcamera