/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
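// Illustrative arithmetic (comment only, not used by the code): a 240 fps HFR
// request batched with MAX_HFR_BATCH_SIZE (8) yields 240 / 8 = 30 batches per
// second, which lines up with PREVIEW_FPS_FOR_HFR for the preview stream.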
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Thresholds (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.
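// Typical access pattern for the Easel state above (see openCamera() and
// closeCamera() below): hold the lock for the whole critical section, e.g.
//     Mutex::Autolock l(gHdrPlusClientLock);
// before reading or writing gEaselManagerClient / gHdrPlusClient and friends.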


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Not every Android enum value has a HAL mapping, so some Android values are
 * not listed. The order of entries matters: when mapping from HAL to Android,
 * the lookup walks the table from lower to higher index, so for HAL values
 * that map to multiple Android values the first match wins.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

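/*
 * Illustrative sketch only (hypothetical snippet; field names assumed from the
 * QCameraMap definition in QCamera3HWI.h): the tables above are scanned
 * linearly, typically bounded by METADATA_MAP_SIZE(), to translate framework
 * enums to HAL enums and back, e.g.
 *
 *     for (size_t i = 0; i < METADATA_MAP_SIZE(EFFECT_MODES_MAP); i++) {
 *         if (EFFECT_MODES_MAP[i].fwk_name == fwkEffectMode) {
 *             halEffectMode = EFFECT_MODES_MAP[i].hal_name;
 *             break;
 *         }
 *     }
 */
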
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
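// Example call site (used later in this file, e.g. in openCamera()):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// The call is a no-op unless gEaselProfilingEnabled is set.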

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

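    // For local debugging, the persist.camera.* switches read above can be set
    // from a host shell before the camera is opened, for example (illustrative
    // values only):
    //     adb shell setprop persist.camera.raw.dump 1
    //     adb shell setprop persist.camera.tnr.video 0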
Thierry Strudel3d639192016-09-09 11:52:26 -0700580 //Load and read GPU library.
581 lib_surface_utils = NULL;
582 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700583 mSurfaceStridePadding = CAM_PAD_TO_64;
584#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700585 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
586 if (lib_surface_utils) {
587 *(void **)&LINK_get_surface_pixel_alignment =
588 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
589 if (LINK_get_surface_pixel_alignment) {
590 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
591 }
592 dlclose(lib_surface_utils);
593 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700594#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000595 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
596 mPDSupported = (0 <= mPDIndex) ? true : false;
597
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700598 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700599}
600
601/*===========================================================================
602 * FUNCTION : ~QCamera3HardwareInterface
603 *
604 * DESCRIPTION: destructor of QCamera3HardwareInterface
605 *
606 * PARAMETERS : none
607 *
608 * RETURN : none
609 *==========================================================================*/
610QCamera3HardwareInterface::~QCamera3HardwareInterface()
611{
612 LOGD("E");
613
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800614 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700615
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800616 // Disable power hint and enable the perf lock for close camera
617 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
618 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
619
620 // unlink of dualcam during close camera
621 if (mIsDeviceLinked) {
622 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
623 &m_pDualCamCmdPtr->bundle_info;
624 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
625 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
626 pthread_mutex_lock(&gCamLock);
627
628 if (mIsMainCamera == 1) {
629 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
630 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
631 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
632 // related session id should be session id of linked session
633 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
634 } else {
635 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
636 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
637 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
638 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
639 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800640 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800641 pthread_mutex_unlock(&gCamLock);
642
643 rc = mCameraHandle->ops->set_dual_cam_cmd(
644 mCameraHandle->camera_handle);
645 if (rc < 0) {
646 LOGE("Dualcam: Unlink failed, but still proceed to close");
647 }
648 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700649
650 /* We need to stop all streams before deleting any stream */
651 if (mRawDumpChannel) {
652 mRawDumpChannel->stop();
653 }
654
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700655 if (mHdrPlusRawSrcChannel) {
656 mHdrPlusRawSrcChannel->stop();
657 }
658
Thierry Strudel3d639192016-09-09 11:52:26 -0700659 // NOTE: 'camera3_stream_t *' objects are already freed at
660 // this stage by the framework
661 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
662 it != mStreamInfo.end(); it++) {
663 QCamera3ProcessingChannel *channel = (*it)->channel;
664 if (channel) {
665 channel->stop();
666 }
667 }
668 if (mSupportChannel)
669 mSupportChannel->stop();
670
671 if (mAnalysisChannel) {
672 mAnalysisChannel->stop();
673 }
674 if (mMetadataChannel) {
675 mMetadataChannel->stop();
676 }
677 if (mChannelHandle) {
678 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
679 mChannelHandle);
680 LOGD("stopping channel %d", mChannelHandle);
681 }
682
683 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
684 it != mStreamInfo.end(); it++) {
685 QCamera3ProcessingChannel *channel = (*it)->channel;
686 if (channel)
687 delete channel;
688 free (*it);
689 }
690 if (mSupportChannel) {
691 delete mSupportChannel;
692 mSupportChannel = NULL;
693 }
694
695 if (mAnalysisChannel) {
696 delete mAnalysisChannel;
697 mAnalysisChannel = NULL;
698 }
699 if (mRawDumpChannel) {
700 delete mRawDumpChannel;
701 mRawDumpChannel = NULL;
702 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700703 if (mHdrPlusRawSrcChannel) {
704 delete mHdrPlusRawSrcChannel;
705 mHdrPlusRawSrcChannel = NULL;
706 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700707 if (mDummyBatchChannel) {
708 delete mDummyBatchChannel;
709 mDummyBatchChannel = NULL;
710 }
711
712 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800713 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700714
715 if (mMetadataChannel) {
716 delete mMetadataChannel;
717 mMetadataChannel = NULL;
718 }
719
720 /* Clean up all channels */
721 if (mCameraInitialized) {
722 if(!mFirstConfiguration){
723 //send the last unconfigure
724 cam_stream_size_info_t stream_config_info;
725 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
726 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
727 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800728 m_bIs4KVideo ? 0 :
729 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700730 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700731 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
732 stream_config_info);
733 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
734 if (rc < 0) {
735 LOGE("set_parms failed for unconfigure");
736 }
737 }
738 deinitParameters();
739 }
740
741 if (mChannelHandle) {
742 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
743 mChannelHandle);
744 LOGH("deleting channel %d", mChannelHandle);
745 mChannelHandle = 0;
746 }
747
748 if (mState != CLOSED)
749 closeCamera();
750
751 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
752 req.mPendingBufferList.clear();
753 }
754 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700755 for (pendingRequestIterator i = mPendingRequestsList.begin();
756 i != mPendingRequestsList.end();) {
757 i = erasePendingRequest(i);
758 }
759 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
760 if (mDefaultMetadata[i])
761 free_camera_metadata(mDefaultMetadata[i]);
762
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800763 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700764
765 pthread_cond_destroy(&mRequestCond);
766
767 pthread_cond_destroy(&mBuffersCond);
768
769 pthread_mutex_destroy(&mMutex);
770 LOGD("X");
771}
772
773/*===========================================================================
774 * FUNCTION : erasePendingRequest
775 *
776 * DESCRIPTION: function to erase a desired pending request after freeing any
777 * allocated memory
778 *
779 * PARAMETERS :
780 * @i : iterator pointing to pending request to be erased
781 *
782 * RETURN : iterator pointing to the next request
783 *==========================================================================*/
784QCamera3HardwareInterface::pendingRequestIterator
785 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
786{
787 if (i->input_buffer != NULL) {
788 free(i->input_buffer);
789 i->input_buffer = NULL;
790 }
791 if (i->settings != NULL)
792 free_camera_metadata((camera_metadata_t*)i->settings);
793 return mPendingRequestsList.erase(i);
794}
795
796/*===========================================================================
797 * FUNCTION : camEvtHandle
798 *
799 * DESCRIPTION: Function registered to mm-camera-interface to handle events
800 *
801 * PARAMETERS :
802 * @camera_handle : interface layer camera handle
803 * @evt : ptr to event
804 * @user_data : user data ptr
805 *
806 * RETURN : none
807 *==========================================================================*/
808void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
809 mm_camera_event_t *evt,
810 void *user_data)
811{
812 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
813 if (obj && evt) {
814 switch(evt->server_event_type) {
815 case CAM_EVENT_TYPE_DAEMON_DIED:
816 pthread_mutex_lock(&obj->mMutex);
817 obj->mState = ERROR;
818 pthread_mutex_unlock(&obj->mMutex);
819 LOGE("Fatal, camera daemon died");
820 break;
821
822 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
823 LOGD("HAL got request pull from Daemon");
824 pthread_mutex_lock(&obj->mMutex);
825 obj->mWokenUpByDaemon = true;
826 obj->unblockRequestIfNecessary();
827 pthread_mutex_unlock(&obj->mMutex);
828 break;
829
830 default:
831 LOGW("Warning: Unhandled event %d",
832 evt->server_event_type);
833 break;
834 }
835 } else {
836 LOGE("NULL user_data/evt");
837 }
838}
839
840/*===========================================================================
841 * FUNCTION : openCamera
842 *
843 * DESCRIPTION: open camera
844 *
845 * PARAMETERS :
846 * @hw_device : double ptr for camera device struct
847 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
852int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
853{
854 int rc = 0;
855 if (mState != CLOSED) {
856 *hw_device = NULL;
857 return PERMISSION_DENIED;
858 }
859
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700860 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800861 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700862 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
863 mCameraId);
864
865 rc = openCamera();
866 if (rc == 0) {
867 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800868 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700869 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800870 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700871
Thierry Strudel3d639192016-09-09 11:52:26 -0700872 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
873 mCameraId, rc);
874
875 if (rc == NO_ERROR) {
876 mState = OPENED;
877 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800878
Thierry Strudel3d639192016-09-09 11:52:26 -0700879 return rc;
880}
881
882/*===========================================================================
883 * FUNCTION : openCamera
884 *
885 * DESCRIPTION: open camera
886 *
887 * PARAMETERS : none
888 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
893int QCamera3HardwareInterface::openCamera()
894{
895 int rc = 0;
896 char value[PROPERTY_VALUE_MAX];
897
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800898 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700899 if (mCameraHandle) {
900 LOGE("Failure: Camera already opened");
901 return ALREADY_EXISTS;
902 }
903
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700904 {
905 Mutex::Autolock l(gHdrPlusClientLock);
906 if (gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700907 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700908 rc = gEaselManagerClient.resume();
909 if (rc != 0) {
910 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
911 return rc;
912 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800913 }
914 }
915
Thierry Strudel3d639192016-09-09 11:52:26 -0700916 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
917 if (rc < 0) {
918 LOGE("Failed to reserve flash for camera id: %d",
919 mCameraId);
920 return UNKNOWN_ERROR;
921 }
922
923 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
924 if (rc) {
925 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
926 return rc;
927 }
928
929 if (!mCameraHandle) {
930 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
931 return -ENODEV;
932 }
933
934 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
935 camEvtHandle, (void *)this);
936
937 if (rc < 0) {
938 LOGE("Error, failed to register event callback");
939 /* Not closing camera here since it is already handled in destructor */
940 return FAILED_TRANSACTION;
941 }
942
943 mExifParams.debug_params =
944 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
945 if (mExifParams.debug_params) {
946 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
947 } else {
948 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
949 return NO_MEMORY;
950 }
951 mFirstConfiguration = true;
952
953 //Notify display HAL that a camera session is active.
954 //But avoid calling the same during bootup because camera service might open/close
955 //cameras at boot time during its initialization and display service will also internally
956 //wait for camera service to initialize first while calling this display API, resulting in a
957 //deadlock situation. Since boot time camera open/close calls are made only to fetch
958 //capabilities, no need of this display bw optimization.
959 //Use "service.bootanim.exit" property to know boot status.
960 property_get("service.bootanim.exit", value, "0");
961 if (atoi(value) == 1) {
962 pthread_mutex_lock(&gCamLock);
963 if (gNumCameraSessions++ == 0) {
964 setCameraLaunchStatus(true);
965 }
966 pthread_mutex_unlock(&gCamLock);
967 }
968
969 //fill the session id needed while linking dual cam
970 pthread_mutex_lock(&gCamLock);
971 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
972 &sessionId[mCameraId]);
973 pthread_mutex_unlock(&gCamLock);
974
975 if (rc < 0) {
        LOGE("Error, failed to get session id");
977 return UNKNOWN_ERROR;
978 } else {
979 //Allocate related cam sync buffer
980 //this is needed for the payload that goes along with bundling cmd for related
981 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700982 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
983 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700984 if(rc != OK) {
985 rc = NO_MEMORY;
986 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
987 return NO_MEMORY;
988 }
989
990 //Map memory for related cam sync buffer
991 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700992 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
993 m_pDualCamCmdHeap->getFd(0),
994 sizeof(cam_dual_camera_cmd_info_t),
995 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700996 if(rc < 0) {
997 LOGE("Dualcam: failed to map Related cam sync buffer");
998 rc = FAILED_TRANSACTION;
999 return NO_MEMORY;
1000 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001001 m_pDualCamCmdPtr =
1002 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001003 }
1004
1005 LOGH("mCameraId=%d",mCameraId);
1006
1007 return NO_ERROR;
1008}
1009
1010/*===========================================================================
1011 * FUNCTION : closeCamera
1012 *
1013 * DESCRIPTION: close camera
1014 *
1015 * PARAMETERS : none
1016 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
1021int QCamera3HardwareInterface::closeCamera()
1022{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001023 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001024 int rc = NO_ERROR;
1025 char value[PROPERTY_VALUE_MAX];
1026
1027 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1028 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001029
1030 // unmap memory for related cam sync buffer
1031 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001032 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001033 if (NULL != m_pDualCamCmdHeap) {
1034 m_pDualCamCmdHeap->deallocate();
1035 delete m_pDualCamCmdHeap;
1036 m_pDualCamCmdHeap = NULL;
1037 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001038 }
1039
Thierry Strudel3d639192016-09-09 11:52:26 -07001040 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1041 mCameraHandle = NULL;
1042
1043 //reset session id to some invalid id
1044 pthread_mutex_lock(&gCamLock);
1045 sessionId[mCameraId] = 0xDEADBEEF;
1046 pthread_mutex_unlock(&gCamLock);
1047
1048 //Notify display HAL that there is no active camera session
1049 //but avoid calling the same during bootup. Refer to openCamera
1050 //for more details.
1051 property_get("service.bootanim.exit", value, "0");
1052 if (atoi(value) == 1) {
1053 pthread_mutex_lock(&gCamLock);
1054 if (--gNumCameraSessions == 0) {
1055 setCameraLaunchStatus(false);
1056 }
1057 pthread_mutex_unlock(&gCamLock);
1058 }
1059
Thierry Strudel3d639192016-09-09 11:52:26 -07001060 if (mExifParams.debug_params) {
1061 free(mExifParams.debug_params);
1062 mExifParams.debug_params = NULL;
1063 }
1064 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1065 LOGW("Failed to release flash for camera id: %d",
1066 mCameraId);
1067 }
1068 mState = CLOSED;
1069 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1070 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001071
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001072 {
1073 Mutex::Autolock l(gHdrPlusClientLock);
1074 if (gHdrPlusClient != nullptr) {
1075 // Disable HDR+ mode.
1076 disableHdrPlusModeLocked();
1077 // Disconnect Easel if it's connected.
1078 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
1079 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001080 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001081
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001082 if (EaselManagerClientOpened) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001083 rc = gEaselManagerClient.stopMipi(mCameraId);
1084 if (rc != 0) {
1085 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1086 }
1087
1088 rc = gEaselManagerClient.suspend();
1089 if (rc != 0) {
1090 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1091 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001092 }
1093 }
1094
Thierry Strudel3d639192016-09-09 11:52:26 -07001095 return rc;
1096}
1097
1098/*===========================================================================
1099 * FUNCTION : initialize
1100 *
1101 * DESCRIPTION: Initialize frameworks callback functions
1102 *
1103 * PARAMETERS :
1104 * @callback_ops : callback function to frameworks
1105 *
1106 * RETURN :
1107 *
1108 *==========================================================================*/
1109int QCamera3HardwareInterface::initialize(
1110 const struct camera3_callback_ops *callback_ops)
1111{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001112 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001113 int rc;
1114
1115 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1116 pthread_mutex_lock(&mMutex);
1117
1118 // Validate current state
1119 switch (mState) {
1120 case OPENED:
1121 /* valid state */
1122 break;
1123 default:
1124 LOGE("Invalid state %d", mState);
1125 rc = -ENODEV;
1126 goto err1;
1127 }
1128
1129 rc = initParameters();
1130 if (rc < 0) {
        LOGE("initParameters failed %d", rc);
1132 goto err1;
1133 }
1134 mCallbackOps = callback_ops;
1135
1136 mChannelHandle = mCameraHandle->ops->add_channel(
1137 mCameraHandle->camera_handle, NULL, NULL, this);
1138 if (mChannelHandle == 0) {
1139 LOGE("add_channel failed");
1140 rc = -ENOMEM;
1141 pthread_mutex_unlock(&mMutex);
1142 return rc;
1143 }
1144
1145 pthread_mutex_unlock(&mMutex);
1146 mCameraInitialized = true;
1147 mState = INITIALIZED;
1148 LOGI("X");
1149 return 0;
1150
1151err1:
1152 pthread_mutex_unlock(&mMutex);
1153 return rc;
1154}
1155
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check whether the requested stream dimensions are among those
 *              advertised by the camera capabilities
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
1167int QCamera3HardwareInterface::validateStreamDimensions(
1168 camera3_stream_configuration_t *streamList)
1169{
1170 int rc = NO_ERROR;
1171 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001172 uint32_t depthWidth = 0;
1173 uint32_t depthHeight = 0;
1174 if (mPDSupported) {
1175 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1176 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1177 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001178
1179 camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if one exists.
     */
1183 for (size_t i = 0; i< streamList->num_streams; i++) {
1184 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1185 if (inputStream != NULL) {
1186 LOGE("Error, Multiple input streams requested");
1187 return -EINVAL;
1188 }
1189 inputStream = streamList->streams[i];
1190 }
1191 }
1192 /*
1193 * Loop through all streams requested in configuration
1194 * Check if unsupported sizes have been requested on any of them
1195 */
1196 for (size_t j = 0; j < streamList->num_streams; j++) {
1197 bool sizeFound = false;
1198 camera3_stream_t *newStream = streamList->streams[j];
1199
1200 uint32_t rotatedHeight = newStream->height;
1201 uint32_t rotatedWidth = newStream->width;
1202 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1203 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1204 rotatedHeight = newStream->width;
1205 rotatedWidth = newStream->height;
1206 }
1207
1208 /*
1209 * Sizes are different for each type of stream format check against
1210 * appropriate table.
1211 */
1212 switch (newStream->format) {
1213 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1214 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1215 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001216 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1217 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1218 mPDSupported) {
1219 if ((depthWidth == newStream->width) &&
1220 (depthHeight == newStream->height)) {
1221 sizeFound = true;
1222 }
1223 break;
1224 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001225 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1226 for (size_t i = 0; i < count; i++) {
1227 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1228 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1229 sizeFound = true;
1230 break;
1231 }
1232 }
1233 break;
1234 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001235 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1236 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001237 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001238 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
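                // Worked example (hypothetical sensor dimensions, comment
                // only): a 640x480 PD map gives (640 * 480 * 2) / 16 = 38400
                // samples, so the matching depth blob stream must be 38400x1.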
Emilian Peev7650c122017-01-19 08:24:33 -08001239 if ((depthSamplesCount == newStream->width) &&
1240 (1 == newStream->height)) {
1241 sizeFound = true;
1242 }
1243 break;
1244 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001245 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1246 /* Verify set size against generated sizes table */
1247 for (size_t i = 0; i < count; i++) {
1248 if (((int32_t)rotatedWidth ==
1249 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1250 ((int32_t)rotatedHeight ==
1251 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1252 sizeFound = true;
1253 break;
1254 }
1255 }
1256 break;
1257 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1258 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1259 default:
1260 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1261 || newStream->stream_type == CAMERA3_STREAM_INPUT
1262 || IS_USAGE_ZSL(newStream->usage)) {
1263 if (((int32_t)rotatedWidth ==
1264 gCamCapability[mCameraId]->active_array_size.width) &&
1265 ((int32_t)rotatedHeight ==
1266 gCamCapability[mCameraId]->active_array_size.height)) {
1267 sizeFound = true;
1268 break;
1269 }
                /* We could potentially break here to enforce that a ZSL stream
                 * set by the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the
                 * full array size, so keep the check lenient for now.
                 */
1276 }
1277 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1278 MAX_SIZES_CNT);
1279 for (size_t i = 0; i < count; i++) {
1280 if (((int32_t)rotatedWidth ==
1281 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1282 ((int32_t)rotatedHeight ==
1283 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1284 sizeFound = true;
1285 break;
1286 }
1287 }
1288 break;
1289 } /* End of switch(newStream->format) */
1290
1291 /* We error out even if a single stream has unsupported size set */
1292 if (!sizeFound) {
1293 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1294 rotatedWidth, rotatedHeight, newStream->format,
1295 gCamCapability[mCameraId]->active_array_size.width,
1296 gCamCapability[mCameraId]->active_array_size.height);
1297 rc = -EINVAL;
1298 break;
1299 }
1300 } /* End of for each stream */
1301 return rc;
1302}
1303
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001304/*===========================================================================
1305 * FUNCTION : validateUsageFlags
1306 *
1307 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1308 *
1309 * PARAMETERS :
1310 * @stream_list : streams to be configured
1311 *
1312 * RETURN :
1313 * NO_ERROR if the usage flags are supported
1314 * error code if usage flags are not supported
1315 *
1316 *==========================================================================*/
1317int QCamera3HardwareInterface::validateUsageFlags(
1318 const camera3_stream_configuration_t* streamList)
1319{
1320 for (size_t j = 0; j < streamList->num_streams; j++) {
1321 const camera3_stream_t *newStream = streamList->streams[j];
1322
1323 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1324 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1325 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1326 continue;
1327 }
1328
1329 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1330 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1331 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1332 bool forcePreviewUBWC = true;
1333 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1334 forcePreviewUBWC = false;
1335 }
1336 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1337 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1338 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1339 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1340 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1341 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1342
1343 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1344 // So color spaces will always match.
1345
1346 // Check whether underlying formats of shared streams match.
1347 if (isVideo && isPreview && videoFormat != previewFormat) {
1348 LOGE("Combined video and preview usage flag is not supported");
1349 return -EINVAL;
1350 }
1351 if (isPreview && isZSL && previewFormat != zslFormat) {
1352 LOGE("Combined preview and zsl usage flag is not supported");
1353 return -EINVAL;
1354 }
1355 if (isVideo && isZSL && videoFormat != zslFormat) {
1356 LOGE("Combined video and zsl usage flag is not supported");
1357 return -EINVAL;
1358 }
1359 }
1360 return NO_ERROR;
1361}
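/* For illustration, the failing case is a single IMPLEMENTATION_DEFINED
 * output stream whose usage bits request two use cases that resolve to
 * different default formats. A hypothetical caller-side sketch, assuming
 * IS_USAGE_VIDEO()/IS_USAGE_PREVIEW() key off the encoder and
 * texture/composer usage bits:
 *
 *   camera3_stream_t s = {};
 *   s.stream_type = CAMERA3_STREAM_OUTPUT;
 *   s.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   s.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE;
 *   // validateUsageFlags() returns -EINVAL when the default video format
 *   // (e.g. UBWC) differs from the default preview format for this size.
 */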
1362
1363/*===========================================================================
1364 * FUNCTION : validateUsageFlagsForEis
1365 *
1366 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1367 *
1368 * PARAMETERS :
1369 * @stream_list : streams to be configured
1370 *
1371 * RETURN :
1372 * NO_ERROR if the usage flags are supported
1373 * error code if usage flags are not supported
1374 *
1375 *==========================================================================*/
1376int QCamera3HardwareInterface::validateUsageFlagsForEis(
1377 const camera3_stream_configuration_t* streamList)
1378{
1379 for (size_t j = 0; j < streamList->num_streams; j++) {
1380 const camera3_stream_t *newStream = streamList->streams[j];
1381
1382 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1383 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1384
1385        // Because EIS is "hard-coded" for certain use cases, and the current
1386        // implementation doesn't support sharing preview and video on the same
1387        // stream, return failure if EIS is forced on.
1388 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1389 LOGE("Combined video and preview usage flag is not supported due to EIS");
1390 return -EINVAL;
1391 }
1392 }
1393 return NO_ERROR;
1394}
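/* Roughly, the check above only rejects a stream when EIS is both enabled
 * and supported at the configured size (a restatement sketch, not new logic):
 *
 *   bool eisActive = m_bEisEnable && m_bEisSupportedSize;
 *   bool sharedPreviewVideo =
 *           IS_USAGE_PREVIEW(newStream->usage) && IS_USAGE_VIDEO(newStream->usage);
 *   // reject only if (eisActive && sharedPreviewVideo)
 */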
1395
Thierry Strudel3d639192016-09-09 11:52:26 -07001396/*==============================================================================
1397 * FUNCTION : isSupportChannelNeeded
1398 *
1399 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1400 *
1401 * PARAMETERS :
1402 * @stream_list : streams to be configured
1403 * @stream_config_info : the config info for streams to be configured
1404 *
1405 * RETURN : Boolean true/false decision
1406 *
1407 *==========================================================================*/
1408bool QCamera3HardwareInterface::isSupportChannelNeeded(
1409 camera3_stream_configuration_t *streamList,
1410 cam_stream_size_info_t stream_config_info)
1411{
1412 uint32_t i;
1413 bool pprocRequested = false;
1414    /* Check for conditions where the PProc pipeline does not have any streams */
1415 for (i = 0; i < stream_config_info.num_streams; i++) {
1416 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1417 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1418 pprocRequested = true;
1419 break;
1420 }
1421 }
1422
1423 if (pprocRequested == false )
1424 return true;
1425
1426    /* A dummy stream is needed if only RAW or JPEG streams are present */
1427 for (i = 0; i < streamList->num_streams; i++) {
1428 switch(streamList->streams[i]->format) {
1429 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1430 case HAL_PIXEL_FORMAT_RAW10:
1431 case HAL_PIXEL_FORMAT_RAW16:
1432 case HAL_PIXEL_FORMAT_BLOB:
1433 break;
1434 default:
1435 return false;
1436 }
1437 }
1438 return true;
1439}
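/* Example (sketch): a configuration containing only a RAW16 stream leaves
 * the PProc pipeline with nothing to drive, so the heuristic returns true
 * and the caller is expected to create a dummy support channel:
 *
 *   // streamList->num_streams == 1 and
 *   // streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16
 *   //   -> every stream falls into the RAW/BLOB cases above
 *   //   -> isSupportChannelNeeded() returns true
 */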
1440
1441/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001442 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001443 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001444 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001445 *
1446 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001447 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001448 *
1449 * RETURN : int32_t type of status
1450 * NO_ERROR -- success
1451 *              non-zero failure code
1452 *
1453 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001454int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001455{
1456 int32_t rc = NO_ERROR;
1457
1458 cam_dimension_t max_dim = {0, 0};
1459 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1460 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1461 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1462 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1463 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1464 }
1465
1466 clear_metadata_buffer(mParameters);
1467
1468 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1469 max_dim);
1470 if (rc != NO_ERROR) {
1471 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1472 return rc;
1473 }
1474
1475 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1476 if (rc != NO_ERROR) {
1477 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1478 return rc;
1479 }
1480
1481 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001483
1484 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1485 mParameters);
1486 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001487 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 return rc;
1489 }
1490
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001492 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1493 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1494 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1495 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1496 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001497
1498 return rc;
1499}
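/* Usage sketch (hypothetical caller; no fields beyond those logged above are
 * assumed):
 *
 *   cam_sensor_mode_info_t modeInfo = {};
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       // modeInfo.op_pixel_clk and modeInfo.num_raw_bits can now be used,
 *       // e.g. to size RAW buffers or estimate sensor readout timing.
 *   }
 *
 * The important pattern is the two-step parameter exchange: first push the
 * maximum configured dimension via CAM_INTF_PARM_MAX_DIMENSION, then read
 * back CAM_INTF_PARM_SENSOR_MODE_INFO for the mode the sensor selects.
 */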
1500
1501/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001502 * FUNCTION : addToPPFeatureMask
1503 *
1504 * DESCRIPTION: add additional features to pp feature mask based on
1505 * stream type and usecase
1506 *
1507 * PARAMETERS :
1508 * @stream_format : stream type for feature mask
1509 * @stream_idx : stream idx within postprocess_mask list to change
1510 *
1511 * RETURN     : None
1512 *
1513 *==========================================================================*/
1514void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1515 uint32_t stream_idx)
1516{
1517 char feature_mask_value[PROPERTY_VALUE_MAX];
1518 cam_feature_mask_t feature_mask;
1519 int args_converted;
1520 int property_len;
1521
1522 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001523#ifdef _LE_CAMERA_
1524 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1525 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1526 property_len = property_get("persist.camera.hal3.feature",
1527 feature_mask_value, swtnr_feature_mask_value);
1528#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001529 property_len = property_get("persist.camera.hal3.feature",
1530 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001531#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001532 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1533 (feature_mask_value[1] == 'x')) {
1534 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1535 } else {
1536 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1537 }
1538 if (1 != args_converted) {
1539 feature_mask = 0;
1540 LOGE("Wrong feature mask %s", feature_mask_value);
1541 return;
1542 }
1543
1544 switch (stream_format) {
1545 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1546 /* Add LLVD to pp feature mask only if video hint is enabled */
1547 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1548 mStreamConfigInfo.postprocess_mask[stream_idx]
1549 |= CAM_QTI_FEATURE_SW_TNR;
1550 LOGH("Added SW TNR to pp feature mask");
1551 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1552 mStreamConfigInfo.postprocess_mask[stream_idx]
1553 |= CAM_QCOM_FEATURE_LLVD;
1554 LOGH("Added LLVD SeeMore to pp feature mask");
1555 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001556 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1557 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1558 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1559 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001560 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1561 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1562 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1563 CAM_QTI_FEATURE_BINNING_CORRECTION;
1564 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001565 break;
1566 }
1567 default:
1568 break;
1569 }
1570 LOGD("PP feature mask %llx",
1571 mStreamConfigInfo.postprocess_mask[stream_idx]);
1572}
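/* Property format sketch: the parser above accepts the mask either in hex
 * (leading "0x") or as a decimal long long. The values below are examples
 * only; the actual bit assignments come from cam_feature_mask_t:
 *
 *   adb shell setprop persist.camera.hal3.feature 0x1000   # hex form
 *   adb shell setprop persist.camera.hal3.feature 4096     # decimal form
 *
 * Even when a bit such as CAM_QTI_FEATURE_SW_TNR or CAM_QCOM_FEATURE_LLVD is
 * set, it is only applied when the video hint and capability checks above
 * allow it.
 */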
1573
1574/*==============================================================================
1575 * FUNCTION : updateFpsInPreviewBuffer
1576 *
1577 * DESCRIPTION: update FPS information in preview buffer.
1578 *
1579 * PARAMETERS :
1580 * @metadata : pointer to metadata buffer
1581 * @frame_number: frame_number to look for in pending buffer list
1582 *
1583 * RETURN : None
1584 *
1585 *==========================================================================*/
1586void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1587 uint32_t frame_number)
1588{
1589 // Mark all pending buffers for this particular request
1590 // with corresponding framerate information
1591 for (List<PendingBuffersInRequest>::iterator req =
1592 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1593 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1594 for(List<PendingBufferInfo>::iterator j =
1595 req->mPendingBufferList.begin();
1596 j != req->mPendingBufferList.end(); j++) {
1597 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1598 if ((req->frame_number == frame_number) &&
1599 (channel->getStreamTypeMask() &
1600 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1601 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1602 CAM_INTF_PARM_FPS_RANGE, metadata) {
1603 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1604 struct private_handle_t *priv_handle =
1605 (struct private_handle_t *)(*(j->buffer));
1606 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1607 }
1608 }
1609 }
1610 }
1611}
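/* Sketch of the per-buffer metadata pattern used here and in
 * updateTimeStampInPendingBuffers() below: recover the gralloc private
 * handle from the buffer_handle_t and annotate it with setMetaData().
 * The fps value below is an example only:
 *
 *   struct private_handle_t *ph =
 *           (struct private_handle_t *)(*(j->buffer));
 *   typeof (MetaData_t::refreshrate) fps = 30;
 *   setMetaData(ph, UPDATE_REFRESH_RATE, &fps);
 */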
1612
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001613/*==============================================================================
1614 * FUNCTION : updateTimeStampInPendingBuffers
1615 *
1616 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1617 * of a frame number
1618 *
1619 * PARAMETERS :
1620 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1621 * @timestamp : timestamp to be set
1622 *
1623 * RETURN : None
1624 *
1625 *==========================================================================*/
1626void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1627 uint32_t frameNumber, nsecs_t timestamp)
1628{
1629 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1630 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1631 if (req->frame_number != frameNumber)
1632 continue;
1633
1634 for (auto k = req->mPendingBufferList.begin();
1635 k != req->mPendingBufferList.end(); k++ ) {
1636 struct private_handle_t *priv_handle =
1637 (struct private_handle_t *) (*(k->buffer));
1638 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1639 }
1640 }
1641 return;
1642}
1643
Thierry Strudel3d639192016-09-09 11:52:26 -07001644/*===========================================================================
1645 * FUNCTION : configureStreams
1646 *
1647 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1648 * and output streams.
1649 *
1650 * PARAMETERS :
1651 * @stream_list : streams to be configured
1652 *
1653 * RETURN :
1654 *
1655 *==========================================================================*/
1656int QCamera3HardwareInterface::configureStreams(
1657 camera3_stream_configuration_t *streamList)
1658{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001659 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660 int rc = 0;
1661
1662 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001663 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001664 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001665 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001666
1667 return rc;
1668}
1669
1670/*===========================================================================
1671 * FUNCTION : configureStreamsPerfLocked
1672 *
1673 * DESCRIPTION: configureStreams while perfLock is held.
1674 *
1675 * PARAMETERS :
1676 * @stream_list : streams to be configured
1677 *
1678 * RETURN : int32_t type of status
1679 * NO_ERROR -- success
1680 *              non-zero failure code
1681 *==========================================================================*/
1682int QCamera3HardwareInterface::configureStreamsPerfLocked(
1683 camera3_stream_configuration_t *streamList)
1684{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001685 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001686 int rc = 0;
1687
1688 // Sanity check stream_list
1689 if (streamList == NULL) {
1690 LOGE("NULL stream configuration");
1691 return BAD_VALUE;
1692 }
1693 if (streamList->streams == NULL) {
1694 LOGE("NULL stream list");
1695 return BAD_VALUE;
1696 }
1697
1698 if (streamList->num_streams < 1) {
1699 LOGE("Bad number of streams requested: %d",
1700 streamList->num_streams);
1701 return BAD_VALUE;
1702 }
1703
1704 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1705 LOGE("Maximum number of streams %d exceeded: %d",
1706 MAX_NUM_STREAMS, streamList->num_streams);
1707 return BAD_VALUE;
1708 }
1709
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001710 rc = validateUsageFlags(streamList);
1711 if (rc != NO_ERROR) {
1712 return rc;
1713 }
1714
Thierry Strudel3d639192016-09-09 11:52:26 -07001715 mOpMode = streamList->operation_mode;
1716 LOGD("mOpMode: %d", mOpMode);
1717
1718 /* first invalidate all the steams in the mStreamList
1719 * if they appear again, they will be validated */
1720 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1721 it != mStreamInfo.end(); it++) {
1722 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1723 if (channel) {
1724 channel->stop();
1725 }
1726 (*it)->status = INVALID;
1727 }
1728
1729 if (mRawDumpChannel) {
1730 mRawDumpChannel->stop();
1731 delete mRawDumpChannel;
1732 mRawDumpChannel = NULL;
1733 }
1734
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001735 if (mHdrPlusRawSrcChannel) {
1736 mHdrPlusRawSrcChannel->stop();
1737 delete mHdrPlusRawSrcChannel;
1738 mHdrPlusRawSrcChannel = NULL;
1739 }
1740
Thierry Strudel3d639192016-09-09 11:52:26 -07001741 if (mSupportChannel)
1742 mSupportChannel->stop();
1743
1744 if (mAnalysisChannel) {
1745 mAnalysisChannel->stop();
1746 }
1747 if (mMetadataChannel) {
1748        /* If mStreamInfo is not empty, there is a metadata stream */
1749 mMetadataChannel->stop();
1750 }
1751 if (mChannelHandle) {
1752 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1753 mChannelHandle);
1754 LOGD("stopping channel %d", mChannelHandle);
1755 }
1756
1757 pthread_mutex_lock(&mMutex);
1758
1759 // Check state
1760 switch (mState) {
1761 case INITIALIZED:
1762 case CONFIGURED:
1763 case STARTED:
1764 /* valid state */
1765 break;
1766 default:
1767 LOGE("Invalid state %d", mState);
1768 pthread_mutex_unlock(&mMutex);
1769 return -ENODEV;
1770 }
1771
1772 /* Check whether we have video stream */
1773 m_bIs4KVideo = false;
1774 m_bIsVideo = false;
1775 m_bEisSupportedSize = false;
1776 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001777 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001778 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001779 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001780 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001781 uint32_t videoWidth = 0U;
1782 uint32_t videoHeight = 0U;
1783 size_t rawStreamCnt = 0;
1784 size_t stallStreamCnt = 0;
1785 size_t processedStreamCnt = 0;
1786 // Number of streams on ISP encoder path
1787 size_t numStreamsOnEncoder = 0;
1788 size_t numYuv888OnEncoder = 0;
1789 bool bYuv888OverrideJpeg = false;
1790 cam_dimension_t largeYuv888Size = {0, 0};
1791 cam_dimension_t maxViewfinderSize = {0, 0};
1792 bool bJpegExceeds4K = false;
1793 bool bJpegOnEncoder = false;
1794 bool bUseCommonFeatureMask = false;
1795 cam_feature_mask_t commonFeatureMask = 0;
1796 bool bSmallJpegSize = false;
1797 uint32_t width_ratio;
1798 uint32_t height_ratio;
1799 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1800 camera3_stream_t *inputStream = NULL;
1801 bool isJpeg = false;
1802 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001803 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001804 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001805
1806 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1807
1808 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001809 uint8_t eis_prop_set;
1810 uint32_t maxEisWidth = 0;
1811 uint32_t maxEisHeight = 0;
1812
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001813 // Initialize all instant AEC related variables
1814 mInstantAEC = false;
1815 mResetInstantAEC = false;
1816 mInstantAECSettledFrameNumber = 0;
1817 mAecSkipDisplayFrameBound = 0;
1818 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001819 mCurrFeatureState = 0;
1820 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001821
Thierry Strudel3d639192016-09-09 11:52:26 -07001822 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1823
1824 size_t count = IS_TYPE_MAX;
1825 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1826 for (size_t i = 0; i < count; i++) {
1827 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001828 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1829 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 break;
1831 }
1832 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001833
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001834 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001835 maxEisWidth = MAX_EIS_WIDTH;
1836 maxEisHeight = MAX_EIS_HEIGHT;
1837 }
1838
1839 /* EIS setprop control */
1840 char eis_prop[PROPERTY_VALUE_MAX];
1841 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001842 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 eis_prop_set = (uint8_t)atoi(eis_prop);
1844
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001845 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1847
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001848 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1849 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001850
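/* Configuration sketch: EIS can be disabled before opening the camera via
 * the property read above (the value shown is an example):
 *
 *   adb shell setprop persist.camera.eis.enable 0
 *
 * Even with the property set, m_bEisEnable also requires sensor support
 * (m_bEisSupported) and a non-HFR operation mode, and it is cleared further
 * below for front cameras or when no video stream is configured.
 */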
Thierry Strudel3d639192016-09-09 11:52:26 -07001851 /* stream configurations */
1852 for (size_t i = 0; i < streamList->num_streams; i++) {
1853 camera3_stream_t *newStream = streamList->streams[i];
1854 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1855 "height = %d, rotation = %d, usage = 0x%x",
1856 i, newStream->stream_type, newStream->format,
1857 newStream->width, newStream->height, newStream->rotation,
1858 newStream->usage);
1859 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1860 newStream->stream_type == CAMERA3_STREAM_INPUT){
1861 isZsl = true;
1862 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001863 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1864 IS_USAGE_PREVIEW(newStream->usage)) {
1865 isPreview = true;
1866 }
1867
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1869 inputStream = newStream;
1870 }
1871
Emilian Peev7650c122017-01-19 08:24:33 -08001872 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1873 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001874 isJpeg = true;
1875 jpegSize.width = newStream->width;
1876 jpegSize.height = newStream->height;
1877 if (newStream->width > VIDEO_4K_WIDTH ||
1878 newStream->height > VIDEO_4K_HEIGHT)
1879 bJpegExceeds4K = true;
1880 }
1881
1882 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1883 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1884 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001885 // In HAL3 we can have multiple different video streams.
1886 // The variables video width and height are used below as
1887 // dimensions of the biggest of them
1888 if (videoWidth < newStream->width ||
1889 videoHeight < newStream->height) {
1890 videoWidth = newStream->width;
1891 videoHeight = newStream->height;
1892 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001893 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1894 (VIDEO_4K_HEIGHT <= newStream->height)) {
1895 m_bIs4KVideo = true;
1896 }
1897 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1898 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001899
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 }
1901 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1902 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1903 switch (newStream->format) {
1904 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001905 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1906 depthPresent = true;
1907 break;
1908 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001909 stallStreamCnt++;
1910 if (isOnEncoder(maxViewfinderSize, newStream->width,
1911 newStream->height)) {
1912 numStreamsOnEncoder++;
1913 bJpegOnEncoder = true;
1914 }
1915 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1916 newStream->width);
1917 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1918                    newStream->height);
1919 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1920 "FATAL: max_downscale_factor cannot be zero and so assert");
1921 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1922 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1923 LOGH("Setting small jpeg size flag to true");
1924 bSmallJpegSize = true;
1925 }
1926 break;
1927 case HAL_PIXEL_FORMAT_RAW10:
1928 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1929 case HAL_PIXEL_FORMAT_RAW16:
1930 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001931 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1932 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1933 pdStatCount++;
1934 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001935 break;
1936 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1937 processedStreamCnt++;
1938 if (isOnEncoder(maxViewfinderSize, newStream->width,
1939 newStream->height)) {
1940 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1941 !IS_USAGE_ZSL(newStream->usage)) {
1942 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1943 }
1944 numStreamsOnEncoder++;
1945 }
1946 break;
1947 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1948 processedStreamCnt++;
1949 if (isOnEncoder(maxViewfinderSize, newStream->width,
1950 newStream->height)) {
1951 // If Yuv888 size is not greater than 4K, set feature mask
1952 // to SUPERSET so that it support concurrent request on
1953 // YUV and JPEG.
1954 if (newStream->width <= VIDEO_4K_WIDTH &&
1955 newStream->height <= VIDEO_4K_HEIGHT) {
1956 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1957 }
1958 numStreamsOnEncoder++;
1959 numYuv888OnEncoder++;
1960 largeYuv888Size.width = newStream->width;
1961 largeYuv888Size.height = newStream->height;
1962 }
1963 break;
1964 default:
1965 processedStreamCnt++;
1966 if (isOnEncoder(maxViewfinderSize, newStream->width,
1967 newStream->height)) {
1968 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1969 numStreamsOnEncoder++;
1970 }
1971 break;
1972 }
1973
1974 }
1975 }
1976
1977 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1978 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1979 !m_bIsVideo) {
1980 m_bEisEnable = false;
1981 }
1982
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001983 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1984 pthread_mutex_unlock(&mMutex);
1985 return -EINVAL;
1986 }
1987
Thierry Strudel54dc9782017-02-15 12:12:10 -08001988 uint8_t forceEnableTnr = 0;
1989 char tnr_prop[PROPERTY_VALUE_MAX];
1990 memset(tnr_prop, 0, sizeof(tnr_prop));
1991 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1992 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1993
Thierry Strudel3d639192016-09-09 11:52:26 -07001994 /* Logic to enable/disable TNR based on specific config size/etc.*/
1995 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1997 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001998 else if (forceEnableTnr)
1999 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002000
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002001 char videoHdrProp[PROPERTY_VALUE_MAX];
2002 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2003 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2004 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2005
2006 if (hdr_mode_prop == 1 && m_bIsVideo &&
2007 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2008 m_bVideoHdrEnabled = true;
2009 else
2010 m_bVideoHdrEnabled = false;
2011
2012
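/* Debug-property sketch for the two switches above (values are examples):
 *
 *   adb shell setprop debug.camera.tnr.forceenable 1   # force TNR on
 *   adb shell setprop persist.camera.hdr.video 1       # request video HDR
 *
 * The force property enables TNR unconditionally, while the normal TNR path
 * needs m_bTnrPreview/m_bTnrVideo plus a video stream outside HFR; video HDR
 * likewise requires a video stream and is skipped in HFR mode.
 */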
Thierry Strudel3d639192016-09-09 11:52:26 -07002013 /* Check if num_streams is sane */
2014 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2015 rawStreamCnt > MAX_RAW_STREAMS ||
2016 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2017 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2018 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2019 pthread_mutex_unlock(&mMutex);
2020 return -EINVAL;
2021 }
2022 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002023 if (isZsl && m_bIs4KVideo) {
2024 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002025 pthread_mutex_unlock(&mMutex);
2026 return -EINVAL;
2027 }
2028 /* Check if stream sizes are sane */
2029 if (numStreamsOnEncoder > 2) {
2030 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2031 pthread_mutex_unlock(&mMutex);
2032 return -EINVAL;
2033 } else if (1 < numStreamsOnEncoder){
2034 bUseCommonFeatureMask = true;
2035 LOGH("Multiple streams above max viewfinder size, common mask needed");
2036 }
2037
2038 /* Check if BLOB size is greater than 4k in 4k recording case */
2039 if (m_bIs4KVideo && bJpegExceeds4K) {
2040 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2041 pthread_mutex_unlock(&mMutex);
2042 return -EINVAL;
2043 }
2044
Emilian Peev7650c122017-01-19 08:24:33 -08002045 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2046 depthPresent) {
2047 LOGE("HAL doesn't support depth streams in HFR mode!");
2048 pthread_mutex_unlock(&mMutex);
2049 return -EINVAL;
2050 }
2051
Thierry Strudel3d639192016-09-09 11:52:26 -07002052 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2053 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2054 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2055 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2056 // configurations:
2057 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2058 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2059 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2060 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2061 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2062 __func__);
2063 pthread_mutex_unlock(&mMutex);
2064 return -EINVAL;
2065 }
2066
2067 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2068 // the YUV stream's size is greater or equal to the JPEG size, set common
2069 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2070 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2071 jpegSize.width, jpegSize.height) &&
2072 largeYuv888Size.width > jpegSize.width &&
2073 largeYuv888Size.height > jpegSize.height) {
2074 bYuv888OverrideJpeg = true;
2075 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2076 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2077 }
2078
2079 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2080 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2081 commonFeatureMask);
2082 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2083 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2084
2085 rc = validateStreamDimensions(streamList);
2086 if (rc == NO_ERROR) {
2087 rc = validateStreamRotations(streamList);
2088 }
2089 if (rc != NO_ERROR) {
2090 LOGE("Invalid stream configuration requested!");
2091 pthread_mutex_unlock(&mMutex);
2092 return rc;
2093 }
2094
Emilian Peev0f3c3162017-03-15 12:57:46 +00002095 if (1 < pdStatCount) {
2096 LOGE("HAL doesn't support multiple PD streams");
2097 pthread_mutex_unlock(&mMutex);
2098 return -EINVAL;
2099 }
2100
2101 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2102 (1 == pdStatCount)) {
2103 LOGE("HAL doesn't support PD streams in HFR mode!");
2104 pthread_mutex_unlock(&mMutex);
2105 return -EINVAL;
2106 }
2107
Thierry Strudel3d639192016-09-09 11:52:26 -07002108 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2109 for (size_t i = 0; i < streamList->num_streams; i++) {
2110 camera3_stream_t *newStream = streamList->streams[i];
2111 LOGH("newStream type = %d, stream format = %d "
2112 "stream size : %d x %d, stream rotation = %d",
2113 newStream->stream_type, newStream->format,
2114 newStream->width, newStream->height, newStream->rotation);
2115        //if the stream is in mStreamInfo, validate it
2116 bool stream_exists = false;
2117 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2118 it != mStreamInfo.end(); it++) {
2119 if ((*it)->stream == newStream) {
2120 QCamera3ProcessingChannel *channel =
2121 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2122 stream_exists = true;
2123 if (channel)
2124 delete channel;
2125 (*it)->status = VALID;
2126 (*it)->stream->priv = NULL;
2127 (*it)->channel = NULL;
2128 }
2129 }
2130 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2131 //new stream
2132 stream_info_t* stream_info;
2133 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2134 if (!stream_info) {
2135 LOGE("Could not allocate stream info");
2136 rc = -ENOMEM;
2137 pthread_mutex_unlock(&mMutex);
2138 return rc;
2139 }
2140 stream_info->stream = newStream;
2141 stream_info->status = VALID;
2142 stream_info->channel = NULL;
2143 mStreamInfo.push_back(stream_info);
2144 }
2145 /* Covers Opaque ZSL and API1 F/W ZSL */
2146 if (IS_USAGE_ZSL(newStream->usage)
2147 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2148 if (zslStream != NULL) {
2149 LOGE("Multiple input/reprocess streams requested!");
2150 pthread_mutex_unlock(&mMutex);
2151 return BAD_VALUE;
2152 }
2153 zslStream = newStream;
2154 }
2155 /* Covers YUV reprocess */
2156 if (inputStream != NULL) {
2157 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2158 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2159 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2160 && inputStream->width == newStream->width
2161 && inputStream->height == newStream->height) {
2162 if (zslStream != NULL) {
2163                    /* This scenario indicates that multiple YUV streams with the
2164                     * same size as the input stream have been requested. Since the
2165                     * zsl stream handle is used solely to override the size of
2166                     * streams that share h/w streams, we just make a guess here as
2167                     * to which of the streams is the ZSL stream. This will be
2168                     * refactored once there is generic logic for streams sharing
2169                     * encoder output. */
2170 LOGH("Warning, Multiple ip/reprocess streams requested!");
2171 }
2172 zslStream = newStream;
2173 }
2174 }
2175 }
2176
2177 /* If a zsl stream is set, we know that we have configured at least one input or
2178 bidirectional stream */
2179 if (NULL != zslStream) {
2180 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2181 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2182 mInputStreamInfo.format = zslStream->format;
2183 mInputStreamInfo.usage = zslStream->usage;
2184 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2185 mInputStreamInfo.dim.width,
2186 mInputStreamInfo.dim.height,
2187 mInputStreamInfo.format, mInputStreamInfo.usage);
2188 }
2189
2190 cleanAndSortStreamInfo();
2191 if (mMetadataChannel) {
2192 delete mMetadataChannel;
2193 mMetadataChannel = NULL;
2194 }
2195 if (mSupportChannel) {
2196 delete mSupportChannel;
2197 mSupportChannel = NULL;
2198 }
2199
2200 if (mAnalysisChannel) {
2201 delete mAnalysisChannel;
2202 mAnalysisChannel = NULL;
2203 }
2204
2205 if (mDummyBatchChannel) {
2206 delete mDummyBatchChannel;
2207 mDummyBatchChannel = NULL;
2208 }
2209
Emilian Peev7650c122017-01-19 08:24:33 -08002210 if (mDepthChannel) {
2211 mDepthChannel = NULL;
2212 }
2213
Thierry Strudel2896d122017-02-23 19:18:03 -08002214 char is_type_value[PROPERTY_VALUE_MAX];
2215 property_get("persist.camera.is_type", is_type_value, "4");
2216 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2217
Binhao Line406f062017-05-03 14:39:44 -07002218 char property_value[PROPERTY_VALUE_MAX];
2219 property_get("persist.camera.gzoom.at", property_value, "0");
2220 int goog_zoom_at = atoi(property_value);
2221 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2222 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2223
2224 property_get("persist.camera.gzoom.4k", property_value, "0");
2225 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2226
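/* Property sketch for the Google zoom switches read above: bit 0 of
 * persist.camera.gzoom.at enables it for video streams, bit 1 for preview
 * streams, and persist.camera.gzoom.4k gates 4K video separately (values
 * are examples):
 *
 *   adb shell setprop persist.camera.gzoom.at 3   # video + preview
 *   adb shell setprop persist.camera.gzoom.4k 1   # allow with 4K video
 */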
Thierry Strudel3d639192016-09-09 11:52:26 -07002227 //Create metadata channel and initialize it
2228 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2229 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2230 gCamCapability[mCameraId]->color_arrangement);
2231 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2232 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002233 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002234 if (mMetadataChannel == NULL) {
2235 LOGE("failed to allocate metadata channel");
2236 rc = -ENOMEM;
2237 pthread_mutex_unlock(&mMutex);
2238 return rc;
2239 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002240 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002241 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2242 if (rc < 0) {
2243 LOGE("metadata channel initialization failed");
2244 delete mMetadataChannel;
2245 mMetadataChannel = NULL;
2246 pthread_mutex_unlock(&mMutex);
2247 return rc;
2248 }
2249
Thierry Strudel2896d122017-02-23 19:18:03 -08002250 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002251 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002252 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002253 // Keep track of preview/video streams indices.
2254 // There could be more than one preview streams, but only one video stream.
2255 int32_t video_stream_idx = -1;
2256 int32_t preview_stream_idx[streamList->num_streams];
2257 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2259 /* Allocate channel objects for the requested streams */
2260 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002261
Thierry Strudel3d639192016-09-09 11:52:26 -07002262 camera3_stream_t *newStream = streamList->streams[i];
2263 uint32_t stream_usage = newStream->usage;
2264 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2265 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2266 struct camera_info *p_info = NULL;
2267 pthread_mutex_lock(&gCamLock);
2268 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2269 pthread_mutex_unlock(&gCamLock);
2270 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2271 || IS_USAGE_ZSL(newStream->usage)) &&
2272 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002273 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002274 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002275 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2276 if (bUseCommonFeatureMask)
2277 zsl_ppmask = commonFeatureMask;
2278 else
2279 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002281 if (numStreamsOnEncoder > 0)
2282 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2283 else
2284 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002285 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002286 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002287 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002288 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002289 LOGH("Input stream configured, reprocess config");
2290 } else {
2291 //for non zsl streams find out the format
2292 switch (newStream->format) {
2293 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2294 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002295 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2297 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2298 /* add additional features to pp feature mask */
2299 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2300 mStreamConfigInfo.num_streams);
2301
2302 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2303 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2304 CAM_STREAM_TYPE_VIDEO;
2305 if (m_bTnrEnabled && m_bTnrVideo) {
2306 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2307 CAM_QCOM_FEATURE_CPP_TNR;
2308 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2309 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2310 ~CAM_QCOM_FEATURE_CDS;
2311 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002312 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2313 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2314 CAM_QTI_FEATURE_PPEISCORE;
2315 }
Binhao Line406f062017-05-03 14:39:44 -07002316 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2317 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2318 CAM_QCOM_FEATURE_GOOG_ZOOM;
2319 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002320 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 } else {
2322 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2323 CAM_STREAM_TYPE_PREVIEW;
2324 if (m_bTnrEnabled && m_bTnrPreview) {
2325 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2326 CAM_QCOM_FEATURE_CPP_TNR;
2327 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2328 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2329 ~CAM_QCOM_FEATURE_CDS;
2330 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002331 if(!m_bSwTnrPreview) {
2332 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2333 ~CAM_QTI_FEATURE_SW_TNR;
2334 }
Binhao Line406f062017-05-03 14:39:44 -07002335 if (is_goog_zoom_preview_enabled) {
2336 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2337 CAM_QCOM_FEATURE_GOOG_ZOOM;
2338 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002339 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002340 padding_info.width_padding = mSurfaceStridePadding;
2341 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002342 previewSize.width = (int32_t)newStream->width;
2343 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 }
2345 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2346 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2347 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2348 newStream->height;
2349 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2350 newStream->width;
2351 }
2352 }
2353 break;
2354 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002355 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002356 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2357 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2358 if (bUseCommonFeatureMask)
2359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2360 commonFeatureMask;
2361 else
2362 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2363 CAM_QCOM_FEATURE_NONE;
2364 } else {
2365 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2366 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2367 }
2368 break;
2369 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002370 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002371 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2372 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2373 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2374 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2375 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002376 /* Remove rotation if it is not supported
2377 for 4K LiveVideo snapshot case (online processing) */
2378 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2379 CAM_QCOM_FEATURE_ROTATION)) {
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2381 &= ~CAM_QCOM_FEATURE_ROTATION;
2382 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002383 } else {
2384 if (bUseCommonFeatureMask &&
2385 isOnEncoder(maxViewfinderSize, newStream->width,
2386 newStream->height)) {
2387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2388 } else {
2389 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2390 }
2391 }
2392 if (isZsl) {
2393 if (zslStream) {
2394 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2395 (int32_t)zslStream->width;
2396 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2397 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2399 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002400 } else {
2401 LOGE("Error, No ZSL stream identified");
2402 pthread_mutex_unlock(&mMutex);
2403 return -EINVAL;
2404 }
2405 } else if (m_bIs4KVideo) {
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2407 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2408 } else if (bYuv888OverrideJpeg) {
2409 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2410 (int32_t)largeYuv888Size.width;
2411 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2412 (int32_t)largeYuv888Size.height;
2413 }
2414 break;
2415 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2416 case HAL_PIXEL_FORMAT_RAW16:
2417 case HAL_PIXEL_FORMAT_RAW10:
2418 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2420 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002421 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2422 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2423 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2424 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2425 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2426 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2427 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2428 gCamCapability[mCameraId]->dt[mPDIndex];
2429 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2430 gCamCapability[mCameraId]->vc[mPDIndex];
2431 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002432 break;
2433 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002434 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002435 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2437 break;
2438 }
2439 }
2440
2441 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2442 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2443 gCamCapability[mCameraId]->color_arrangement);
2444
2445 if (newStream->priv == NULL) {
2446 //New stream, construct channel
2447 switch (newStream->stream_type) {
2448 case CAMERA3_STREAM_INPUT:
2449 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2450 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2451 break;
2452 case CAMERA3_STREAM_BIDIRECTIONAL:
2453 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2454 GRALLOC_USAGE_HW_CAMERA_WRITE;
2455 break;
2456 case CAMERA3_STREAM_OUTPUT:
2457                /* For video encoding streams, set the read/write rarely
2458                 * flags so that the buffers may be allocated un-cached */
2459 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2460 newStream->usage |=
2461 (GRALLOC_USAGE_SW_READ_RARELY |
2462 GRALLOC_USAGE_SW_WRITE_RARELY |
2463 GRALLOC_USAGE_HW_CAMERA_WRITE);
2464 else if (IS_USAGE_ZSL(newStream->usage))
2465 {
2466 LOGD("ZSL usage flag skipping");
2467 }
2468 else if (newStream == zslStream
2469 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2470 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2471 } else
2472 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2473 break;
2474 default:
2475 LOGE("Invalid stream_type %d", newStream->stream_type);
2476 break;
2477 }
2478
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002479 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002480 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2481 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2482 QCamera3ProcessingChannel *channel = NULL;
2483 switch (newStream->format) {
2484 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2485 if ((newStream->usage &
2486 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2487 (streamList->operation_mode ==
2488 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2489 ) {
2490 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2491 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002492 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002493 this,
2494 newStream,
2495 (cam_stream_type_t)
2496 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2497 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2498 mMetadataChannel,
2499 0); //heap buffers are not required for HFR video channel
2500 if (channel == NULL) {
2501 LOGE("allocation of channel failed");
2502 pthread_mutex_unlock(&mMutex);
2503 return -ENOMEM;
2504 }
2505 //channel->getNumBuffers() will return 0 here so use
2506                        //MAX_INFLIGHT_HFR_REQUESTS
2507 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2508 newStream->priv = channel;
2509 LOGI("num video buffers in HFR mode: %d",
2510 MAX_INFLIGHT_HFR_REQUESTS);
2511 } else {
2512 /* Copy stream contents in HFR preview only case to create
2513 * dummy batch channel so that sensor streaming is in
2514 * HFR mode */
2515 if (!m_bIsVideo && (streamList->operation_mode ==
2516 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2517 mDummyBatchStream = *newStream;
2518 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002519 int bufferCount = MAX_INFLIGHT_REQUESTS;
2520 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2521 CAM_STREAM_TYPE_VIDEO) {
2522 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2523 bufferCount = MAX_VIDEO_BUFFERS;
2524 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002525 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2526 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002527 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002528 this,
2529 newStream,
2530 (cam_stream_type_t)
2531 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2532 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2533 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002534 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002535 if (channel == NULL) {
2536 LOGE("allocation of channel failed");
2537 pthread_mutex_unlock(&mMutex);
2538 return -ENOMEM;
2539 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002540 /* disable UBWC for preview, though supported,
2541 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002542 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002543 (previewSize.width == (int32_t)videoWidth)&&
2544 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002545 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002546 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002547 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002548 /* When goog_zoom is linked to the preview or video stream,
2549                     * disable UBWC for the linked stream */
2550 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2551 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2552 channel->setUBWCEnabled(false);
2553 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002554 newStream->max_buffers = channel->getNumBuffers();
2555 newStream->priv = channel;
2556 }
2557 break;
2558 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2559 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2560 mChannelHandle,
2561 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002562 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002563 this,
2564 newStream,
2565 (cam_stream_type_t)
2566 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2567 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2568 mMetadataChannel);
2569 if (channel == NULL) {
2570 LOGE("allocation of YUV channel failed");
2571 pthread_mutex_unlock(&mMutex);
2572 return -ENOMEM;
2573 }
2574 newStream->max_buffers = channel->getNumBuffers();
2575 newStream->priv = channel;
2576 break;
2577 }
2578 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2579 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002580 case HAL_PIXEL_FORMAT_RAW10: {
2581 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2582 (HAL_DATASPACE_DEPTH != newStream->data_space))
2583 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002584 mRawChannel = new QCamera3RawChannel(
2585 mCameraHandle->camera_handle, mChannelHandle,
2586 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002587 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002588 this, newStream,
2589 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002590 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002591 if (mRawChannel == NULL) {
2592 LOGE("allocation of raw channel failed");
2593 pthread_mutex_unlock(&mMutex);
2594 return -ENOMEM;
2595 }
2596 newStream->max_buffers = mRawChannel->getNumBuffers();
2597 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2598 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002599 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002601 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2602 mDepthChannel = new QCamera3DepthChannel(
2603 mCameraHandle->camera_handle, mChannelHandle,
2604 mCameraHandle->ops, NULL, NULL, &padding_info,
2605 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2606 mMetadataChannel);
2607 if (NULL == mDepthChannel) {
2608 LOGE("Allocation of depth channel failed");
2609 pthread_mutex_unlock(&mMutex);
2610 return NO_MEMORY;
2611 }
2612 newStream->priv = mDepthChannel;
2613 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2614 } else {
2615 // Max live snapshot inflight buffer is 1. This is to mitigate
2616 // frame drop issues for video snapshot. The more buffers being
2617 // allocated, the more frame drops there are.
2618 mPictureChannel = new QCamera3PicChannel(
2619 mCameraHandle->camera_handle, mChannelHandle,
2620 mCameraHandle->ops, captureResultCb,
2621 setBufferErrorStatus, &padding_info, this, newStream,
2622 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2623 m_bIs4KVideo, isZsl, mMetadataChannel,
2624 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2625 if (mPictureChannel == NULL) {
2626 LOGE("allocation of channel failed");
2627 pthread_mutex_unlock(&mMutex);
2628 return -ENOMEM;
2629 }
2630 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2631 newStream->max_buffers = mPictureChannel->getNumBuffers();
2632 mPictureChannel->overrideYuvSize(
2633 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2634 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002635 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002636 break;
2637
2638 default:
2639 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002640 pthread_mutex_unlock(&mMutex);
2641 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002642 }
2643 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2644 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2645 } else {
2646 LOGE("Error, Unknown stream type");
2647 pthread_mutex_unlock(&mMutex);
2648 return -EINVAL;
2649 }
2650
2651 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002652 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2653 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002654 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002655 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002656 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2657 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2658 }
2659 }
2660
2661 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2662 it != mStreamInfo.end(); it++) {
2663 if ((*it)->stream == newStream) {
2664 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2665 break;
2666 }
2667 }
2668 } else {
2669 // Channel already exists for this stream
2670 // Do nothing for now
2671 }
2672 padding_info = gCamCapability[mCameraId]->padding_info;
2673
Emilian Peev7650c122017-01-19 08:24:33 -08002674 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002675 * since there is no real stream associated with it
2676 */
Emilian Peev7650c122017-01-19 08:24:33 -08002677 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002678 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2679 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002680 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002681 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002682 }
2683
Binhao Lincdb362a2017-04-20 13:31:54 -07002684 // By default, preview stream TNR is disabled.
2685 // Enable TNR to the preview stream if all conditions below are satisfied:
2686 // 1. resolution <= 1080p.
2687 // 2. preview resolution == video resolution.
2688 // 3. video stream TNR is enabled.
2689     // 4. EIS 2.0 is in use (see the illustrative example below).
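    // Illustrative example (hypothetical stream sizes, not from any specific device):
    // a 1920x1080 preview stream paired with a 1920x1080 video stream, with video
    // TNR on and EIS 2.0 selected, gets CAM_QCOM_FEATURE_CPP_TNR added to its
    // postprocess mask in the loop below; a 3840x2160 video stream, or a preview
    // size that differs from the video size, leaves preview TNR disabled.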
2690 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2691 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2692 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2693 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2694 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2695 video_stream->width == preview_stream->width &&
2696 video_stream->height == preview_stream->height) {
2697 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2698 CAM_QCOM_FEATURE_CPP_TNR;
2699 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2700 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2701 ~CAM_QCOM_FEATURE_CDS;
2702 }
2703 }
2704
Thierry Strudel2896d122017-02-23 19:18:03 -08002705 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2706 onlyRaw = false;
2707 }
2708
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002709 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002710 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002711 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002712 cam_analysis_info_t analysisInfo;
2713 int32_t ret = NO_ERROR;
2714 ret = mCommon.getAnalysisInfo(
2715 FALSE,
2716 analysisFeatureMask,
2717 &analysisInfo);
2718 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002719 cam_color_filter_arrangement_t analysis_color_arrangement =
2720 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2721 CAM_FILTER_ARRANGEMENT_Y :
2722 gCamCapability[mCameraId]->color_arrangement);
2723 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2724 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002725 cam_dimension_t analysisDim;
2726 analysisDim = mCommon.getMatchingDimension(previewSize,
2727 analysisInfo.analysis_recommended_res);
2728
2729 mAnalysisChannel = new QCamera3SupportChannel(
2730 mCameraHandle->camera_handle,
2731 mChannelHandle,
2732 mCameraHandle->ops,
2733 &analysisInfo.analysis_padding_info,
2734 analysisFeatureMask,
2735 CAM_STREAM_TYPE_ANALYSIS,
2736 &analysisDim,
2737 (analysisInfo.analysis_format
2738 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2739 : CAM_FORMAT_YUV_420_NV21),
2740 analysisInfo.hw_analysis_supported,
2741 gCamCapability[mCameraId]->color_arrangement,
2742 this,
2743 0); // force buffer count to 0
2744 } else {
2745 LOGW("getAnalysisInfo failed, ret = %d", ret);
2746 }
2747 if (!mAnalysisChannel) {
2748 LOGW("Analysis channel cannot be created");
2749 }
2750 }
2751
Thierry Strudel3d639192016-09-09 11:52:26 -07002752 //RAW DUMP channel
2753 if (mEnableRawDump && isRawStreamRequested == false){
2754 cam_dimension_t rawDumpSize;
2755 rawDumpSize = getMaxRawSize(mCameraId);
2756 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2757 setPAAFSupport(rawDumpFeatureMask,
2758 CAM_STREAM_TYPE_RAW,
2759 gCamCapability[mCameraId]->color_arrangement);
2760 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2761 mChannelHandle,
2762 mCameraHandle->ops,
2763 rawDumpSize,
2764 &padding_info,
2765 this, rawDumpFeatureMask);
2766 if (!mRawDumpChannel) {
2767 LOGE("Raw Dump channel cannot be created");
2768 pthread_mutex_unlock(&mMutex);
2769 return -ENOMEM;
2770 }
2771 }
2772
Thierry Strudel3d639192016-09-09 11:52:26 -07002773 if (mAnalysisChannel) {
2774 cam_analysis_info_t analysisInfo;
2775 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2776 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2777 CAM_STREAM_TYPE_ANALYSIS;
2778 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2779 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002780 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002781 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2782 &analysisInfo);
2783 if (rc != NO_ERROR) {
2784 LOGE("getAnalysisInfo failed, ret = %d", rc);
2785 pthread_mutex_unlock(&mMutex);
2786 return rc;
2787 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002788 cam_color_filter_arrangement_t analysis_color_arrangement =
2789 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2790 CAM_FILTER_ARRANGEMENT_Y :
2791 gCamCapability[mCameraId]->color_arrangement);
2792 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2793 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2794 analysis_color_arrangement);
2795
Thierry Strudel3d639192016-09-09 11:52:26 -07002796 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002797 mCommon.getMatchingDimension(previewSize,
2798 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002799 mStreamConfigInfo.num_streams++;
2800 }
2801
Thierry Strudel2896d122017-02-23 19:18:03 -08002802 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002803 cam_analysis_info_t supportInfo;
2804 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2805 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2806 setPAAFSupport(callbackFeatureMask,
2807 CAM_STREAM_TYPE_CALLBACK,
2808 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002809 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002810 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002811 if (ret != NO_ERROR) {
2812 /* Ignore the error for Mono camera
2813 * because the PAAF bit mask is only set
2814 * for CAM_STREAM_TYPE_ANALYSIS stream type
2815 */
2816 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2817 LOGW("getAnalysisInfo failed, ret = %d", ret);
2818 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002819 }
2820 mSupportChannel = new QCamera3SupportChannel(
2821 mCameraHandle->camera_handle,
2822 mChannelHandle,
2823 mCameraHandle->ops,
2824 &gCamCapability[mCameraId]->padding_info,
2825 callbackFeatureMask,
2826 CAM_STREAM_TYPE_CALLBACK,
2827 &QCamera3SupportChannel::kDim,
2828 CAM_FORMAT_YUV_420_NV21,
2829 supportInfo.hw_analysis_supported,
2830 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002831 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002832 if (!mSupportChannel) {
2833 LOGE("dummy channel cannot be created");
2834 pthread_mutex_unlock(&mMutex);
2835 return -ENOMEM;
2836 }
2837 }
2838
2839 if (mSupportChannel) {
2840 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2841 QCamera3SupportChannel::kDim;
2842 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2843 CAM_STREAM_TYPE_CALLBACK;
2844 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2845 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2846 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2847 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2848 gCamCapability[mCameraId]->color_arrangement);
2849 mStreamConfigInfo.num_streams++;
2850 }
2851
2852 if (mRawDumpChannel) {
2853 cam_dimension_t rawSize;
2854 rawSize = getMaxRawSize(mCameraId);
2855 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2856 rawSize;
2857 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2858 CAM_STREAM_TYPE_RAW;
2859 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2860 CAM_QCOM_FEATURE_NONE;
2861 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2862 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2863 gCamCapability[mCameraId]->color_arrangement);
2864 mStreamConfigInfo.num_streams++;
2865 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002866
2867 if (mHdrPlusRawSrcChannel) {
2868 cam_dimension_t rawSize;
2869 rawSize = getMaxRawSize(mCameraId);
2870 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2871 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2872 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2873 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2874 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2875 gCamCapability[mCameraId]->color_arrangement);
2876 mStreamConfigInfo.num_streams++;
2877 }
2878
Thierry Strudel3d639192016-09-09 11:52:26 -07002879 /* In HFR mode, if video stream is not added, create a dummy channel so that
2880 * ISP can create a batch mode even for preview only case. This channel is
2881 * never 'start'ed (no stream-on), it is only 'initialized' */
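    /* Illustrative example (assumed configuration): a constrained high speed session
     * configured with only a preview stream has no video stream for the ISP to batch
     * against; the dummy CAM_STREAM_TYPE_VIDEO channel created below is registered in
     * mStreamConfigInfo like a real video stream but is never streamed on, so batch
     * mode can still be set up. */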
2882 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2883 !m_bIsVideo) {
2884 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2885 setPAAFSupport(dummyFeatureMask,
2886 CAM_STREAM_TYPE_VIDEO,
2887 gCamCapability[mCameraId]->color_arrangement);
2888 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2889 mChannelHandle,
2890 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002891 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002892 this,
2893 &mDummyBatchStream,
2894 CAM_STREAM_TYPE_VIDEO,
2895 dummyFeatureMask,
2896 mMetadataChannel);
2897 if (NULL == mDummyBatchChannel) {
2898             LOGE("creation of mDummyBatchChannel failed. "
2899                     "Preview will use non-HFR sensor mode");
2900 }
2901 }
2902 if (mDummyBatchChannel) {
2903 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2904 mDummyBatchStream.width;
2905 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2906 mDummyBatchStream.height;
2907 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2908 CAM_STREAM_TYPE_VIDEO;
2909 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2910 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2911 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2913 gCamCapability[mCameraId]->color_arrangement);
2914 mStreamConfigInfo.num_streams++;
2915 }
2916
2917 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2918 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002919 m_bIs4KVideo ? 0 :
2920 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002921
2922 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2923 for (pendingRequestIterator i = mPendingRequestsList.begin();
2924 i != mPendingRequestsList.end();) {
2925 i = erasePendingRequest(i);
2926 }
2927 mPendingFrameDropList.clear();
2928 // Initialize/Reset the pending buffers list
2929 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2930 req.mPendingBufferList.clear();
2931 }
2932 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2933
Thierry Strudel3d639192016-09-09 11:52:26 -07002934 mCurJpegMeta.clear();
2935 //Get min frame duration for this streams configuration
2936 deriveMinFrameDuration();
2937
Chien-Yu Chenee335912017-02-09 17:53:20 -08002938 mFirstPreviewIntentSeen = false;
2939
2940     // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002941 {
2942 Mutex::Autolock l(gHdrPlusClientLock);
2943 disableHdrPlusModeLocked();
2944 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002945
Thierry Strudel3d639192016-09-09 11:52:26 -07002946 // Update state
2947 mState = CONFIGURED;
2948
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002949 mFirstMetadataCallback = true;
2950
Thierry Strudel3d639192016-09-09 11:52:26 -07002951 pthread_mutex_unlock(&mMutex);
2952
2953 return rc;
2954}
2955
2956/*===========================================================================
2957 * FUNCTION : validateCaptureRequest
2958 *
2959 * DESCRIPTION: validate a capture request from camera service
2960 *
2961 * PARAMETERS :
2962 * @request : request from framework to process
2963 *
2964 * RETURN :
2965 *
2966 *==========================================================================*/
2967int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002968 camera3_capture_request_t *request,
2969 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002970{
2971 ssize_t idx = 0;
2972 const camera3_stream_buffer_t *b;
2973 CameraMetadata meta;
2974
2975 /* Sanity check the request */
2976 if (request == NULL) {
2977 LOGE("NULL capture request");
2978 return BAD_VALUE;
2979 }
2980
2981 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2982 /*settings cannot be null for the first request*/
2983 return BAD_VALUE;
2984 }
2985
2986 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002987 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2988 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002989         LOGE("Request %d: No output buffers provided!",
2990                 frameNumber);
2991 return BAD_VALUE;
2992 }
2993 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2994         LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2995 request->num_output_buffers, MAX_NUM_STREAMS);
2996 return BAD_VALUE;
2997 }
2998 if (request->input_buffer != NULL) {
2999 b = request->input_buffer;
3000 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3001 LOGE("Request %d: Buffer %ld: Status not OK!",
3002 frameNumber, (long)idx);
3003 return BAD_VALUE;
3004 }
3005 if (b->release_fence != -1) {
3006 LOGE("Request %d: Buffer %ld: Has a release fence!",
3007 frameNumber, (long)idx);
3008 return BAD_VALUE;
3009 }
3010 if (b->buffer == NULL) {
3011 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3012 frameNumber, (long)idx);
3013 return BAD_VALUE;
3014 }
3015 }
3016
3017 // Validate all buffers
3018 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003019 if (b == NULL) {
3020 return BAD_VALUE;
3021 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003022 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003023 QCamera3ProcessingChannel *channel =
3024 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3025 if (channel == NULL) {
3026 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3027 frameNumber, (long)idx);
3028 return BAD_VALUE;
3029 }
3030 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3031 LOGE("Request %d: Buffer %ld: Status not OK!",
3032 frameNumber, (long)idx);
3033 return BAD_VALUE;
3034 }
3035 if (b->release_fence != -1) {
3036 LOGE("Request %d: Buffer %ld: Has a release fence!",
3037 frameNumber, (long)idx);
3038 return BAD_VALUE;
3039 }
3040 if (b->buffer == NULL) {
3041 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3042 frameNumber, (long)idx);
3043 return BAD_VALUE;
3044 }
3045 if (*(b->buffer) == NULL) {
3046 LOGE("Request %d: Buffer %ld: NULL private handle!",
3047 frameNumber, (long)idx);
3048 return BAD_VALUE;
3049 }
3050 idx++;
3051 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003052 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003053 return NO_ERROR;
3054}
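// Example (hypothetical request): a camera3_capture_request_t with settings == NULL
// while mState == CONFIGURED, with no output buffers (and no internally requested
// streams), or with any output buffer that carries a release fence or a NULL buffer
// handle is rejected above with BAD_VALUE before it can touch any HAL state.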
3055
3056/*===========================================================================
3057 * FUNCTION : deriveMinFrameDuration
3058 *
3059 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3060 * on currently configured streams.
3061 *
3062 * PARAMETERS : NONE
3063 *
3064 * RETURN : NONE
3065 *
3066 *==========================================================================*/
3067void QCamera3HardwareInterface::deriveMinFrameDuration()
3068{
3069 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3070
3071 maxJpegDim = 0;
3072 maxProcessedDim = 0;
3073 maxRawDim = 0;
3074
3075 // Figure out maximum jpeg, processed, and raw dimensions
3076 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3077 it != mStreamInfo.end(); it++) {
3078
3079 // Input stream doesn't have valid stream_type
3080 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3081 continue;
3082
3083 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3084 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3085 if (dimension > maxJpegDim)
3086 maxJpegDim = dimension;
3087 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3088 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3089 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3090 if (dimension > maxRawDim)
3091 maxRawDim = dimension;
3092 } else {
3093 if (dimension > maxProcessedDim)
3094 maxProcessedDim = dimension;
3095 }
3096 }
3097
3098 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3099 MAX_SIZES_CNT);
3100
3101 //Assume all jpeg dimensions are in processed dimensions.
3102 if (maxJpegDim > maxProcessedDim)
3103 maxProcessedDim = maxJpegDim;
3104    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
3105 if (maxProcessedDim > maxRawDim) {
3106 maxRawDim = INT32_MAX;
3107
3108 for (size_t i = 0; i < count; i++) {
3109 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3110 gCamCapability[mCameraId]->raw_dim[i].height;
3111 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3112 maxRawDim = dimension;
3113 }
3114 }
3115
3116 //Find minimum durations for processed, jpeg, and raw
3117 for (size_t i = 0; i < count; i++) {
3118 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3119 gCamCapability[mCameraId]->raw_dim[i].height) {
3120 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3121 break;
3122 }
3123 }
3124 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3125 for (size_t i = 0; i < count; i++) {
3126 if (maxProcessedDim ==
3127 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3128 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3129 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3130 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3131 break;
3132 }
3133 }
3134}
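/* Illustrative sketch (not part of the HAL, hypothetical helper): the raw-dimension
 * search above roughly reduces to "pick the smallest dimension that still covers the
 * target, then use its minimum duration". A simplified standalone version:
 *
 *   static int64_t minDurationForDim(int32_t targetDim, size_t count,
 *           const int32_t dims[], const int64_t minDurations[]) {
 *       int32_t bestDim = INT32_MAX;
 *       int64_t bestDuration = 0; // stays 0 if no entry covers targetDim
 *       for (size_t i = 0; i < count; i++) {
 *           if (dims[i] >= targetDim && dims[i] < bestDim) {
 *               bestDim = dims[i];
 *               bestDuration = minDurations[i];
 *           }
 *       }
 *       return bestDuration;
 *   }
 */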
3135
3136/*===========================================================================
3137 * FUNCTION : getMinFrameDuration
3138 *
3139 * DESCRIPTION: get the minimum frame duration based on the currently configured
3140 * minimum frame durations and the current request configuration.
3141 *
3142 * PARAMETERS : @request: request sent by the framework
3143 *
3144 * RETURN : minimum frame duration for a particular request
3145 *
3146 *==========================================================================*/
3147int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3148{
3149 bool hasJpegStream = false;
3150 bool hasRawStream = false;
3151 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3152 const camera3_stream_t *stream = request->output_buffers[i].stream;
3153 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3154 hasJpegStream = true;
3155 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3156 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3157 stream->format == HAL_PIXEL_FORMAT_RAW16)
3158 hasRawStream = true;
3159 }
3160
3161 if (!hasJpegStream)
3162 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3163 else
3164 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3165}
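// Example (hypothetical request mix): a request containing only a preview buffer
// returns MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); adding a BLOB (JPEG)
// buffer to the same request additionally folds in mMinJpegFrameDuration, so a
// JPEG-bearing request can never report a shorter minimum duration than a
// preview-only one.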
3166
3167/*===========================================================================
3168 * FUNCTION : handleBuffersDuringFlushLock
3169 *
3170 * DESCRIPTION: Account for buffers returned from back-end during flush
3171 * This function is executed while mMutex is held by the caller.
3172 *
3173 * PARAMETERS :
3174 * @buffer: image buffer for the callback
3175 *
3176 * RETURN :
3177 *==========================================================================*/
3178void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3179{
3180 bool buffer_found = false;
3181 for (List<PendingBuffersInRequest>::iterator req =
3182 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3183 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3184 for (List<PendingBufferInfo>::iterator i =
3185 req->mPendingBufferList.begin();
3186 i != req->mPendingBufferList.end(); i++) {
3187 if (i->buffer == buffer->buffer) {
3188 mPendingBuffersMap.numPendingBufsAtFlush--;
3189 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3190 buffer->buffer, req->frame_number,
3191 mPendingBuffersMap.numPendingBufsAtFlush);
3192 buffer_found = true;
3193 break;
3194 }
3195 }
3196 if (buffer_found) {
3197 break;
3198 }
3199 }
3200 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3201 //signal the flush()
3202 LOGD("All buffers returned to HAL. Continue flush");
3203 pthread_cond_signal(&mBuffersCond);
3204 }
3205}
3206
Thierry Strudel3d639192016-09-09 11:52:26 -07003207/*===========================================================================
3208 * FUNCTION : handleBatchMetadata
3209 *
3210 * DESCRIPTION: Handles metadata buffer callback in batch mode
3211 *
3212 * PARAMETERS : @metadata_buf: metadata buffer
3213 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3214 * the meta buf in this method
3215 *
3216 * RETURN :
3217 *
3218 *==========================================================================*/
3219void QCamera3HardwareInterface::handleBatchMetadata(
3220 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3221{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003222 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003223
3224 if (NULL == metadata_buf) {
3225 LOGE("metadata_buf is NULL");
3226 return;
3227 }
3228     /* In batch mode, the metadata will contain the frame number and timestamp of
3229      * the last frame in the batch. E.g. a batch containing buffers from requests
3230      * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3231      * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3232 * multiple process_capture_results */
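    /* Illustrative walk-through (hypothetical numbers, for clarity only): if the
     * batch covers requests 5..8, the metadata carries last_frame_number = 8 and,
     * with first_frame_number = 5, frameNumDiff = 8 + 1 - 5 = 4, so the loop below
     * emits results for frames 5, 6, 7 and 8 (loopCount = 4). Assuming a 120 fps HFR
     * session and last_frame_capture_time = T, the inferred timestamps are
     *   first_frame_capture_time = T - (3 * NSEC_PER_SEC) / 120
     *   capture_time(i)          = first_frame_capture_time + (i * NSEC_PER_SEC) / 120
     * i.e. the four frames are spaced roughly 8.33 ms apart and end at T. */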
3233 metadata_buffer_t *metadata =
3234 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3235 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3236 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3237 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3238 uint32_t frame_number = 0, urgent_frame_number = 0;
3239 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3240 bool invalid_metadata = false;
3241 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3242 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003243 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003244
3245 int32_t *p_frame_number_valid =
3246 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3247 uint32_t *p_frame_number =
3248 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3249 int64_t *p_capture_time =
3250 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3251 int32_t *p_urgent_frame_number_valid =
3252 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3253 uint32_t *p_urgent_frame_number =
3254 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3255
3256 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3257 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3258 (NULL == p_urgent_frame_number)) {
3259 LOGE("Invalid metadata");
3260 invalid_metadata = true;
3261 } else {
3262 frame_number_valid = *p_frame_number_valid;
3263 last_frame_number = *p_frame_number;
3264 last_frame_capture_time = *p_capture_time;
3265 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3266 last_urgent_frame_number = *p_urgent_frame_number;
3267 }
3268
3269     /* In batch mode, when no video buffers are requested, set_parms are sent
3270 * for every capture_request. The difference between consecutive urgent
3271 * frame numbers and frame numbers should be used to interpolate the
3272 * corresponding frame numbers and time stamps */
3273 pthread_mutex_lock(&mMutex);
3274 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003275 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3276 if(idx < 0) {
3277 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3278 last_urgent_frame_number);
3279 mState = ERROR;
3280 pthread_mutex_unlock(&mMutex);
3281 return;
3282 }
3283 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003284 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3285 first_urgent_frame_number;
3286
3287 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3288 urgent_frame_number_valid,
3289 first_urgent_frame_number, last_urgent_frame_number);
3290 }
3291
3292 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003293 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3294 if(idx < 0) {
3295 LOGE("Invalid frame number received: %d. Irrecoverable error",
3296 last_frame_number);
3297 mState = ERROR;
3298 pthread_mutex_unlock(&mMutex);
3299 return;
3300 }
3301 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003302 frameNumDiff = last_frame_number + 1 -
3303 first_frame_number;
3304 mPendingBatchMap.removeItem(last_frame_number);
3305
3306 LOGD("frm: valid: %d frm_num: %d - %d",
3307 frame_number_valid,
3308 first_frame_number, last_frame_number);
3309
3310 }
3311 pthread_mutex_unlock(&mMutex);
3312
3313 if (urgent_frame_number_valid || frame_number_valid) {
3314 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3315 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3316 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3317 urgentFrameNumDiff, last_urgent_frame_number);
3318 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3319 LOGE("frameNumDiff: %d frameNum: %d",
3320 frameNumDiff, last_frame_number);
3321 }
3322
3323 for (size_t i = 0; i < loopCount; i++) {
3324 /* handleMetadataWithLock is called even for invalid_metadata for
3325 * pipeline depth calculation */
3326 if (!invalid_metadata) {
3327 /* Infer frame number. Batch metadata contains frame number of the
3328 * last frame */
3329 if (urgent_frame_number_valid) {
3330 if (i < urgentFrameNumDiff) {
3331 urgent_frame_number =
3332 first_urgent_frame_number + i;
3333 LOGD("inferred urgent frame_number: %d",
3334 urgent_frame_number);
3335 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3336 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3337 } else {
3338 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3339 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3340 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3341 }
3342 }
3343
3344 /* Infer frame number. Batch metadata contains frame number of the
3345 * last frame */
3346 if (frame_number_valid) {
3347 if (i < frameNumDiff) {
3348 frame_number = first_frame_number + i;
3349 LOGD("inferred frame_number: %d", frame_number);
3350 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3351 CAM_INTF_META_FRAME_NUMBER, frame_number);
3352 } else {
3353 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3354 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3355 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3356 }
3357 }
3358
3359 if (last_frame_capture_time) {
3360 //Infer timestamp
3361 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003362 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003363 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003364 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003365 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3366 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3367 LOGD("batch capture_time: %lld, capture_time: %lld",
3368 last_frame_capture_time, capture_time);
3369 }
3370 }
3371 pthread_mutex_lock(&mMutex);
3372 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003373 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003374 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3375 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003376 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003377 pthread_mutex_unlock(&mMutex);
3378 }
3379
3380 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003381 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003382 mMetadataChannel->bufDone(metadata_buf);
3383 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003384 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003385 }
3386}
3387
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003388void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3389 camera3_error_msg_code_t errorCode)
3390{
3391 camera3_notify_msg_t notify_msg;
3392 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3393 notify_msg.type = CAMERA3_MSG_ERROR;
3394 notify_msg.message.error.error_code = errorCode;
3395 notify_msg.message.error.error_stream = NULL;
3396 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003397 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003398
3399 return;
3400}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003401
3402/*===========================================================================
3403 * FUNCTION : sendPartialMetadataWithLock
3404 *
3405 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3406 *
3407 * PARAMETERS : @metadata: metadata buffer
3408 * @requestIter: The iterator for the pending capture request for
3409 * which the partial result is being sen
3410  *                which the partial result is being sent
3411 * last urgent metadata in a batch. Always true for non-batch mode
3412 *
3413 * RETURN :
3414 *
3415 *==========================================================================*/
3416
3417void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3418 metadata_buffer_t *metadata,
3419 const pendingRequestIterator requestIter,
3420 bool lastUrgentMetadataInBatch)
3421{
3422 camera3_capture_result_t result;
3423 memset(&result, 0, sizeof(camera3_capture_result_t));
3424
3425 requestIter->partial_result_cnt++;
3426
3427 // Extract 3A metadata
3428 result.result = translateCbUrgentMetadataToResultMetadata(
3429 metadata, lastUrgentMetadataInBatch);
3430 // Populate metadata result
3431 result.frame_number = requestIter->frame_number;
3432 result.num_output_buffers = 0;
3433 result.output_buffers = NULL;
3434 result.partial_result = requestIter->partial_result_cnt;
3435
3436 {
3437 Mutex::Autolock l(gHdrPlusClientLock);
3438 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3439 // Notify HDR+ client about the partial metadata.
3440 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3441 result.partial_result == PARTIAL_RESULT_COUNT);
3442 }
3443 }
3444
3445 orchestrateResult(&result);
3446 LOGD("urgent frame_number = %u", result.frame_number);
3447 free_camera_metadata((camera_metadata_t *)result.result);
3448}
3449
Thierry Strudel3d639192016-09-09 11:52:26 -07003450/*===========================================================================
3451 * FUNCTION : handleMetadataWithLock
3452 *
3453 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3454 *
3455 * PARAMETERS : @metadata_buf: metadata buffer
3456 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3457 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003458 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3459 * last urgent metadata in a batch. Always true for non-batch mode
3460 * @lastMetadataInBatch: Boolean to indicate whether this is the
3461 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003462 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3463 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003464 *
3465 * RETURN :
3466 *
3467 *==========================================================================*/
3468void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003469 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003470 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3471 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003472{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003473 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003474 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3475 //during flush do not send metadata from this thread
3476 LOGD("not sending metadata during flush or when mState is error");
3477 if (free_and_bufdone_meta_buf) {
3478 mMetadataChannel->bufDone(metadata_buf);
3479 free(metadata_buf);
3480 }
3481 return;
3482 }
3483
3484 //not in flush
3485 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3486 int32_t frame_number_valid, urgent_frame_number_valid;
3487 uint32_t frame_number, urgent_frame_number;
3488 int64_t capture_time;
3489 nsecs_t currentSysTime;
3490
3491 int32_t *p_frame_number_valid =
3492 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3493 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3494 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3495 int32_t *p_urgent_frame_number_valid =
3496 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3497 uint32_t *p_urgent_frame_number =
3498 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3499 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3500 metadata) {
3501 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3502 *p_frame_number_valid, *p_frame_number);
3503 }
3504
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003505 camera_metadata_t *resultMetadata = nullptr;
3506
Thierry Strudel3d639192016-09-09 11:52:26 -07003507 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3508 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3509 LOGE("Invalid metadata");
3510 if (free_and_bufdone_meta_buf) {
3511 mMetadataChannel->bufDone(metadata_buf);
3512 free(metadata_buf);
3513 }
3514 goto done_metadata;
3515 }
3516 frame_number_valid = *p_frame_number_valid;
3517 frame_number = *p_frame_number;
3518 capture_time = *p_capture_time;
3519 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3520 urgent_frame_number = *p_urgent_frame_number;
3521 currentSysTime = systemTime(CLOCK_MONOTONIC);
3522
3523 // Detect if buffers from any requests are overdue
3524 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003525 int64_t timeout;
3526 {
3527 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3528 // If there is a pending HDR+ request, the following requests may be blocked until the
3529 // HDR+ request is done. So allow a longer timeout.
3530 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3531 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3532 }
3533
3534 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003535 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003536 assert(missed.stream->priv);
3537 if (missed.stream->priv) {
3538 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3539 assert(ch->mStreams[0]);
3540 if (ch->mStreams[0]) {
3541 LOGE("Cancel missing frame = %d, buffer = %p,"
3542 "stream type = %d, stream format = %d",
3543 req.frame_number, missed.buffer,
3544 ch->mStreams[0]->getMyType(), missed.stream->format);
3545 ch->timeoutFrame(req.frame_number);
3546 }
3547 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003548 }
3549 }
3550 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003551     //For the very first metadata callback, regardless of whether it contains a valid
3552     //frame number, send the partial metadata for the jumpstarting requests.
3553     //Note that this has to be done even if the metadata doesn't contain a valid
3554     //urgent frame number, because in the case where only 1 request is ever submitted
3555     //to the HAL, there won't be a subsequent valid urgent frame number.
3556 if (mFirstMetadataCallback) {
3557 for (pendingRequestIterator i =
3558 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3559 if (i->bUseFirstPartial) {
3560 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3561 }
3562 }
3563 mFirstMetadataCallback = false;
3564 }
3565
Thierry Strudel3d639192016-09-09 11:52:26 -07003566 //Partial result on process_capture_result for timestamp
3567 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003568 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003569
3570             //Received an urgent frame number, handle it
3571 //using partial results
3572 for (pendingRequestIterator i =
3573 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3574 LOGD("Iterator Frame = %d urgent frame = %d",
3575 i->frame_number, urgent_frame_number);
3576
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003577 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003578 (i->partial_result_cnt == 0)) {
3579 LOGE("Error: HAL missed urgent metadata for frame number %d",
3580 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003581 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003582 }
3583
3584 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003585 i->partial_result_cnt == 0) {
3586 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003587 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3588 // Instant AEC settled for this frame.
3589 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3590 mInstantAECSettledFrameNumber = urgent_frame_number;
3591 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 break;
3593 }
3594 }
3595 }
3596
3597 if (!frame_number_valid) {
3598 LOGD("Not a valid normal frame number, used as SOF only");
3599 if (free_and_bufdone_meta_buf) {
3600 mMetadataChannel->bufDone(metadata_buf);
3601 free(metadata_buf);
3602 }
3603 goto done_metadata;
3604 }
3605 LOGH("valid frame_number = %u, capture_time = %lld",
3606 frame_number, capture_time);
3607
Emilian Peev7650c122017-01-19 08:24:33 -08003608 if (metadata->is_depth_data_valid) {
3609 handleDepthDataLocked(metadata->depth_data, frame_number);
3610 }
3611
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003612     // Check whether any stream buffer corresponding to this frame is dropped or not.
3613     // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3614     // OR, if instant AEC is enabled, drop frames until AEC has settled.
3615 for (auto & pendingRequest : mPendingRequestsList) {
3616 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3617 mInstantAECSettledFrameNumber)) {
3618 camera3_notify_msg_t notify_msg = {};
3619 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003620 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003621 QCamera3ProcessingChannel *channel =
3622 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003623 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003624 if (p_cam_frame_drop) {
3625 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003626 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003627 // Got the stream ID for drop frame.
3628 dropFrame = true;
3629 break;
3630 }
3631 }
3632 } else {
3633 // This is instant AEC case.
3634                     // This is the instant AEC case.
3635                     // For instant AEC, drop the stream until AEC has settled.
3636 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003638 if (dropFrame) {
3639 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3640 if (p_cam_frame_drop) {
3641 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003642 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003644 } else {
3645 // For instant AEC, inform frame drop and frame number
3646 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3647 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003648 pendingRequest.frame_number, streamID,
3649 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003650 }
3651 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003652 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003653 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003654 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003655 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003656 if (p_cam_frame_drop) {
3657 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003658 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003659 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003660 } else {
3661 // For instant AEC, inform frame drop and frame number
3662 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3663 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003664 pendingRequest.frame_number, streamID,
3665 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003666 }
3667 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003669 PendingFrameDrop.stream_ID = streamID;
3670 // Add the Frame drop info to mPendingFrameDropList
3671 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003672 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003673 }
3674 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003675 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003676
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003677 for (auto & pendingRequest : mPendingRequestsList) {
3678 // Find the pending request with the frame number.
3679 if (pendingRequest.frame_number == frame_number) {
3680 // Update the sensor timestamp.
3681 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003682
Thierry Strudel3d639192016-09-09 11:52:26 -07003683
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003684             /* Set the timestamp in display metadata so that clients aware of
3685                private_handle, such as VT, can use these unmodified timestamps.
3686                The camera framework is unaware of this timestamp and cannot change it. */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003687 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003688
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 // Find channel requiring metadata, meaning internal offline postprocess
3690 // is needed.
3691 //TODO: for now, we don't support two streams requiring metadata at the same time.
3692             // (because we are not making copies, and the metadata buffer is not reference counted.)
3693 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003694 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3695 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003696 if (iter->need_metadata) {
3697 internalPproc = true;
3698 QCamera3ProcessingChannel *channel =
3699 (QCamera3ProcessingChannel *)iter->stream->priv;
3700 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003701 if(p_is_metabuf_queued != NULL) {
3702 *p_is_metabuf_queued = true;
3703 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003704 break;
3705 }
3706 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003707 for (auto itr = pendingRequest.internalRequestList.begin();
3708 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003709 if (itr->need_metadata) {
3710 internalPproc = true;
3711 QCamera3ProcessingChannel *channel =
3712 (QCamera3ProcessingChannel *)itr->stream->priv;
3713 channel->queueReprocMetadata(metadata_buf);
3714 break;
3715 }
3716 }
3717
Thierry Strudel54dc9782017-02-15 12:12:10 -08003718 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003719
3720 bool *enableZsl = nullptr;
3721 if (gExposeEnableZslKey) {
3722 enableZsl = &pendingRequest.enableZsl;
3723 }
3724
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003725 resultMetadata = translateFromHalMetadata(metadata,
3726 pendingRequest.timestamp, pendingRequest.request_id,
3727 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3728 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003729 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003730 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003731 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003732 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003733 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003734 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003735
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003736 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003737
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003738 if (pendingRequest.blob_request) {
3739 //Dump tuning metadata if enabled and available
3740 char prop[PROPERTY_VALUE_MAX];
3741 memset(prop, 0, sizeof(prop));
3742 property_get("persist.camera.dumpmetadata", prop, "0");
3743 int32_t enabled = atoi(prop);
3744 if (enabled && metadata->is_tuning_params_valid) {
3745 dumpMetadataToFile(metadata->tuning_params,
3746 mMetaFrameCount,
3747 enabled,
3748 "Snapshot",
3749 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003750 }
3751 }
3752
3753 if (!internalPproc) {
3754 LOGD("couldn't find need_metadata for this metadata");
3755 // Return metadata buffer
3756 if (free_and_bufdone_meta_buf) {
3757 mMetadataChannel->bufDone(metadata_buf);
3758 free(metadata_buf);
3759 }
3760 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003761
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003763 }
3764 }
3765
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 // Try to send out shutter callbacks and capture results.
3767 handlePendingResultsWithLock(frame_number, resultMetadata);
3768 return;
3769
Thierry Strudel3d639192016-09-09 11:52:26 -07003770done_metadata:
3771 for (pendingRequestIterator i = mPendingRequestsList.begin();
3772 i != mPendingRequestsList.end() ;i++) {
3773 i->pipeline_depth++;
3774 }
3775 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3776 unblockRequestIfNecessary();
3777}
3778
3779/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003780 * FUNCTION : handleDepthDataLocked
3781 *
3782 * DESCRIPTION: Handles incoming depth data
3783 *
3784 * PARAMETERS : @depthData : Depth data
3785 * @frameNumber: Frame number of the incoming depth data
3786 *
3787 * RETURN :
3788 *
3789 *==========================================================================*/
3790void QCamera3HardwareInterface::handleDepthDataLocked(
3791 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3792 uint32_t currentFrameNumber;
3793 buffer_handle_t *depthBuffer;
3794
3795 if (nullptr == mDepthChannel) {
3796 LOGE("Depth channel not present!");
3797 return;
3798 }
3799
3800 camera3_stream_buffer_t resultBuffer =
3801 {.acquire_fence = -1,
3802 .release_fence = -1,
3803 .status = CAMERA3_BUFFER_STATUS_OK,
3804 .buffer = nullptr,
3805 .stream = mDepthChannel->getStream()};
3806 camera3_capture_result_t result =
3807 {.result = nullptr,
3808 .num_output_buffers = 1,
3809 .output_buffers = &resultBuffer,
3810 .partial_result = 0,
3811 .frame_number = 0};
3812
3813 do {
3814 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3815 if (nullptr == depthBuffer) {
3816 break;
3817 }
3818
3819 result.frame_number = currentFrameNumber;
3820 resultBuffer.buffer = depthBuffer;
3821 if (currentFrameNumber == frameNumber) {
3822 int32_t rc = mDepthChannel->populateDepthData(depthData,
3823 frameNumber);
3824 if (NO_ERROR != rc) {
3825 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3826 } else {
3827 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3828 }
3829 } else if (currentFrameNumber > frameNumber) {
3830 break;
3831 } else {
3832 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3833 {{currentFrameNumber, mDepthChannel->getStream(),
3834 CAMERA3_MSG_ERROR_BUFFER}}};
3835 orchestrateNotify(&notify_msg);
3836
3837             LOGE("Depth buffer for frame number: %d is missing, "
3838                     "returning it with an error!", currentFrameNumber);
3839 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3840 }
3841 mDepthChannel->unmapBuffer(currentFrameNumber);
3842
3843 orchestrateResult(&result);
3844 } while (currentFrameNumber < frameNumber);
3845}
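// Illustrative behavior (hypothetical frame numbers): if depth buffers for frames 10,
// 11 and 12 are queued and depth data arrives for frame 12, the loop above returns
// frames 10 and 11 with CAMERA3_BUFFER_STATUS_ERROR (plus an ERROR_BUFFER notify for
// each) and frame 12 with the populated depth data (assuming populateDepthData()
// succeeds), draining the queue strictly in frame-number order.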
3846
3847/*===========================================================================
3848 * FUNCTION : notifyErrorFoPendingDepthData
3849 *
3850 * DESCRIPTION: Returns error for any pending depth buffers
3851 *
3852 * PARAMETERS : depthCh - depth channel that needs to get flushed
3853 *
3854 * RETURN :
3855 *
3856 *==========================================================================*/
3857void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3858 QCamera3DepthChannel *depthCh) {
3859 uint32_t currentFrameNumber;
3860 buffer_handle_t *depthBuffer;
3861
3862 if (nullptr == depthCh) {
3863 return;
3864 }
3865
3866 camera3_notify_msg_t notify_msg =
3867 {.type = CAMERA3_MSG_ERROR,
3868 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3869 camera3_stream_buffer_t resultBuffer =
3870 {.acquire_fence = -1,
3871 .release_fence = -1,
3872 .buffer = nullptr,
3873 .stream = depthCh->getStream(),
3874 .status = CAMERA3_BUFFER_STATUS_ERROR};
3875 camera3_capture_result_t result =
3876 {.result = nullptr,
3877 .frame_number = 0,
3878 .num_output_buffers = 1,
3879 .partial_result = 0,
3880 .output_buffers = &resultBuffer};
3881
3882 while (nullptr !=
3883 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3884 depthCh->unmapBuffer(currentFrameNumber);
3885
3886 notify_msg.message.error.frame_number = currentFrameNumber;
3887 orchestrateNotify(&notify_msg);
3888
3889 resultBuffer.buffer = depthBuffer;
3890 result.frame_number = currentFrameNumber;
3891 orchestrateResult(&result);
3892 };
3893}
3894
3895/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003896 * FUNCTION : hdrPlusPerfLock
3897 *
3898 * DESCRIPTION: perf lock for HDR+ using custom intent
3899 *
3900 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3901 *
3902 * RETURN : None
3903 *
3904 *==========================================================================*/
3905void QCamera3HardwareInterface::hdrPlusPerfLock(
3906 mm_camera_super_buf_t *metadata_buf)
3907{
3908 if (NULL == metadata_buf) {
3909 LOGE("metadata_buf is NULL");
3910 return;
3911 }
3912 metadata_buffer_t *metadata =
3913 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3914 int32_t *p_frame_number_valid =
3915 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3916 uint32_t *p_frame_number =
3917 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3918
3919 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3920 LOGE("%s: Invalid metadata", __func__);
3921 return;
3922 }
3923
3924 //acquire perf lock for 5 sec after the last HDR frame is captured
3925 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3926 if ((p_frame_number != NULL) &&
3927 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003928 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003929 }
3930 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003931}
3932
3933/*===========================================================================
3934 * FUNCTION : handleInputBufferWithLock
3935 *
3936 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3937 *
3938 * PARAMETERS : @frame_number: frame number of the input buffer
3939 *
3940 * RETURN :
3941 *
3942 *==========================================================================*/
3943void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3944{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003945 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003946 pendingRequestIterator i = mPendingRequestsList.begin();
3947 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3948 i++;
3949 }
3950 if (i != mPendingRequestsList.end() && i->input_buffer) {
3951 //found the right request
3952 if (!i->shutter_notified) {
3953 CameraMetadata settings;
3954 camera3_notify_msg_t notify_msg;
3955 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3956 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3957 if(i->settings) {
3958 settings = i->settings;
3959 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3960 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3961 } else {
3962 LOGE("No timestamp in input settings! Using current one.");
3963 }
3964 } else {
3965 LOGE("Input settings missing!");
3966 }
3967
3968 notify_msg.type = CAMERA3_MSG_SHUTTER;
3969 notify_msg.message.shutter.frame_number = frame_number;
3970 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003971 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003972 i->shutter_notified = true;
3973 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3974 i->frame_number, notify_msg.message.shutter.timestamp);
3975 }
3976
3977 if (i->input_buffer->release_fence != -1) {
3978 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3979 close(i->input_buffer->release_fence);
3980 if (rc != OK) {
3981 LOGE("input buffer sync wait failed %d", rc);
3982 }
3983 }
3984
3985 camera3_capture_result result;
3986 memset(&result, 0, sizeof(camera3_capture_result));
3987 result.frame_number = frame_number;
3988 result.result = i->settings;
3989 result.input_buffer = i->input_buffer;
3990 result.partial_result = PARTIAL_RESULT_COUNT;
3991
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003992 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003993 LOGD("Input request metadata and input buffer frame_number = %u",
3994 i->frame_number);
3995 i = erasePendingRequest(i);
3996 } else {
3997 LOGE("Could not find input request for frame number %d", frame_number);
3998 }
3999}
4000
4001/*===========================================================================
4002 * FUNCTION : handleBufferWithLock
4003 *
4004 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4005 *
4006 * PARAMETERS : @buffer: image buffer for the callback
4007 * @frame_number: frame number of the image buffer
4008 *
4009 * RETURN :
4010 *
4011 *==========================================================================*/
4012void QCamera3HardwareInterface::handleBufferWithLock(
4013 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4014{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004015 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004016
4017 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4018 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4019 }
4020
Thierry Strudel3d639192016-09-09 11:52:26 -07004021 /* Nothing to be done during error state */
4022 if ((ERROR == mState) || (DEINIT == mState)) {
4023 return;
4024 }
4025 if (mFlushPerf) {
4026 handleBuffersDuringFlushLock(buffer);
4027 return;
4028 }
4029 //not in flush
4030 // If the frame number doesn't exist in the pending request list,
4031 // directly send the buffer to the frameworks, and update pending buffers map
4032 // Otherwise, book-keep the buffer.
4033 pendingRequestIterator i = mPendingRequestsList.begin();
4034 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4035 i++;
4036 }
4037 if (i == mPendingRequestsList.end()) {
4038        // Verify that all pending requests' frame_numbers are greater
4039 for (pendingRequestIterator j = mPendingRequestsList.begin();
4040 j != mPendingRequestsList.end(); j++) {
4041 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4042 LOGW("Error: pending live frame number %d is smaller than %d",
4043 j->frame_number, frame_number);
4044 }
4045 }
4046 camera3_capture_result_t result;
4047 memset(&result, 0, sizeof(camera3_capture_result_t));
4048 result.result = NULL;
4049 result.frame_number = frame_number;
4050 result.num_output_buffers = 1;
4051 result.partial_result = 0;
4052 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4053 m != mPendingFrameDropList.end(); m++) {
4054 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4055 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4056 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4057 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4058 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4059 frame_number, streamID);
4060 m = mPendingFrameDropList.erase(m);
4061 break;
4062 }
4063 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004064 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004065 result.output_buffers = buffer;
4066 LOGH("result frame_number = %d, buffer = %p",
4067 frame_number, buffer->buffer);
4068
4069 mPendingBuffersMap.removeBuf(buffer->buffer);
4070
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004071 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004072 } else {
4073 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 if (i->input_buffer->release_fence != -1) {
4075 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4076 close(i->input_buffer->release_fence);
4077 if (rc != OK) {
4078 LOGE("input buffer sync wait failed %d", rc);
4079 }
4080 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004081 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004082
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004083 // Put buffer into the pending request
4084 for (auto &requestedBuffer : i->buffers) {
4085 if (requestedBuffer.stream == buffer->stream) {
4086 if (requestedBuffer.buffer != nullptr) {
4087 LOGE("Error: buffer is already set");
4088 } else {
4089 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4090 sizeof(camera3_stream_buffer_t));
4091 *(requestedBuffer.buffer) = *buffer;
4092 LOGH("cache buffer %p at result frame_number %u",
4093 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004094 }
4095 }
4096 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004097
4098 if (i->input_buffer) {
4099 // For a reprocessing request, try to send out shutter callback and result metadata.
4100 handlePendingResultsWithLock(frame_number, nullptr);
4101 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004103
4104 if (mPreviewStarted == false) {
4105 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4106 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004107 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4108
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004109 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4110 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4111 mPreviewStarted = true;
4112
4113 // Set power hint for preview
4114 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4115 }
4116 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004117}
4118
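/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Sends out shutter callbacks and capture results for pending
 *              requests in ascending frame-number order, and notifies errors
 *              for older live requests that will never receive result
 *              metadata. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber    : frame number whose result metadata is now
 *                                available
 *              @resultMetadata : result metadata for that frame number, or
 *                                nullptr for a reprocess request
 *
 * RETURN     : None
 *
 *==========================================================================*/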
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004119void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4120 const camera_metadata_t *resultMetadata)
4121{
4122 // Find the pending request for this result metadata.
4123 auto requestIter = mPendingRequestsList.begin();
4124 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4125 requestIter++;
4126 }
4127
4128 if (requestIter == mPendingRequestsList.end()) {
4129 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4130 return;
4131 }
4132
4133 // Update the result metadata
4134 requestIter->resultMetadata = resultMetadata;
4135
4136 // Check what type of request this is.
4137 bool liveRequest = false;
4138 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004139 // HDR+ request doesn't have partial results.
4140 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004141 } else if (requestIter->input_buffer != nullptr) {
4142 // Reprocessing request result is the same as settings.
4143 requestIter->resultMetadata = requestIter->settings;
4144 // Reprocessing request doesn't have partial results.
4145 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4146 } else {
4147 liveRequest = true;
4148 requestIter->partial_result_cnt++;
4149 mPendingLiveRequest--;
4150
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004151 {
4152 Mutex::Autolock l(gHdrPlusClientLock);
4153 // For a live request, send the metadata to HDR+ client.
4154 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4155 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4156 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4157 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004158 }
4159 }
4160
4161 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4162 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4163 bool readyToSend = true;
4164
4165 // Iterate through the pending requests to send out shutter callbacks and results that are
4166 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4167 // live requests that don't have result metadata yet.
4168 auto iter = mPendingRequestsList.begin();
4169 while (iter != mPendingRequestsList.end()) {
4170 // Check if current pending request is ready. If it's not ready, the following pending
4171 // requests are also not ready.
4172 if (readyToSend && iter->resultMetadata == nullptr) {
4173 readyToSend = false;
4174 }
4175
4176 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4177
4178 std::vector<camera3_stream_buffer_t> outputBuffers;
4179
4180 camera3_capture_result_t result = {};
4181 result.frame_number = iter->frame_number;
4182 result.result = iter->resultMetadata;
4183 result.partial_result = iter->partial_result_cnt;
4184
4185 // If this pending buffer has result metadata, we may be able to send out shutter callback
4186 // and result metadata.
4187 if (iter->resultMetadata != nullptr) {
4188 if (!readyToSend) {
4189 // If any of the previous pending request is not ready, this pending request is
4190 // also not ready to send in order to keep shutter callbacks and result metadata
4191 // in order.
4192 iter++;
4193 continue;
4194 }
4195
4196 // Invoke shutter callback if not yet.
4197 if (!iter->shutter_notified) {
4198 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4199
4200 // Find the timestamp in HDR+ result metadata
4201 camera_metadata_ro_entry_t entry;
4202 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4203 ANDROID_SENSOR_TIMESTAMP, &entry);
4204 if (res != OK) {
4205 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4206 __FUNCTION__, iter->frame_number, strerror(-res), res);
4207 } else {
4208 timestamp = entry.data.i64[0];
4209 }
4210
4211 camera3_notify_msg_t notify_msg = {};
4212 notify_msg.type = CAMERA3_MSG_SHUTTER;
4213 notify_msg.message.shutter.frame_number = iter->frame_number;
4214 notify_msg.message.shutter.timestamp = timestamp;
4215 orchestrateNotify(&notify_msg);
4216 iter->shutter_notified = true;
4217 }
4218
4219 result.input_buffer = iter->input_buffer;
4220
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004221 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4222 // If the result metadata belongs to a live request, notify errors for previous pending
4223 // live requests.
4224 mPendingLiveRequest--;
4225
4226 CameraMetadata dummyMetadata;
4227 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4228 result.result = dummyMetadata.release();
4229
4230 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004231
4232            // partial_result should be PARTIAL_RESULT_COUNT in case of
4233            // ERROR_RESULT.
4234 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4235 result.partial_result = PARTIAL_RESULT_COUNT;
4236
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004237 } else {
4238 iter++;
4239 continue;
4240 }
4241
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004242 // Prepare output buffer array
4243 for (auto bufferInfoIter = iter->buffers.begin();
4244 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4245 if (bufferInfoIter->buffer != nullptr) {
4246
4247 QCamera3Channel *channel =
4248 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4249 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4250
4251 // Check if this buffer is a dropped frame.
4252 auto frameDropIter = mPendingFrameDropList.begin();
4253 while (frameDropIter != mPendingFrameDropList.end()) {
4254 if((frameDropIter->stream_ID == streamID) &&
4255 (frameDropIter->frame_number == frameNumber)) {
4256 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4257 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4258 streamID);
4259 mPendingFrameDropList.erase(frameDropIter);
4260 break;
4261 } else {
4262 frameDropIter++;
4263 }
4264 }
4265
4266 // Check buffer error status
4267 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4268 bufferInfoIter->buffer->buffer);
4269 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4270
4271 outputBuffers.push_back(*(bufferInfoIter->buffer));
4272 free(bufferInfoIter->buffer);
4273 bufferInfoIter->buffer = NULL;
4274 }
4275 }
4276
4277 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4278 result.num_output_buffers = outputBuffers.size();
4279
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004280 orchestrateResult(&result);
4281
4282 // For reprocessing, result metadata is the same as settings so do not free it here to
4283 // avoid double free.
4284 if (result.result != iter->settings) {
4285 free_camera_metadata((camera_metadata_t *)result.result);
4286 }
4287 iter->resultMetadata = nullptr;
4288 iter = erasePendingRequest(iter);
4289 }
4290
4291 if (liveRequest) {
4292 for (auto &iter : mPendingRequestsList) {
4293 // Increment pipeline depth for the following pending requests.
4294 if (iter.frame_number > frameNumber) {
4295 iter.pipeline_depth++;
4296 }
4297 }
4298 }
4299
4300 unblockRequestIfNecessary();
4301}
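
/*===========================================================================
 * Illustrative walk-through of the ordering logic above (frame numbers are
 * hypothetical):
 *
 * Suppose the pending list holds live requests #10, #11 and #12, metadata is
 * already set for #10, missing for #11, and this function is entered with
 * frameNumber == 12 when #12's metadata arrives.
 *
 *   - #10: readyToSend is still true, so its shutter callback and result
 *          are sent and the entry is erased.
 *   - #11: has no metadata, so readyToSend flips to false; because #11 is an
 *          older live request than #12, it is completed with ERROR_RESULT
 *          plus a dummy metadata entry and erased.
 *   - #12: has metadata, but readyToSend is already false, so it stays
 *          queued and is delivered on a later invocation.
 *
 * The effect is that shutter callbacks and results always reach the
 * framework in ascending frame-number order.
 *==========================================================================*/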
4302
Thierry Strudel3d639192016-09-09 11:52:26 -07004303/*===========================================================================
4304 * FUNCTION : unblockRequestIfNecessary
4305 *
4306 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4307 * that mMutex is held when this function is called.
4308 *
4309 * PARAMETERS :
4310 *
4311 * RETURN :
4312 *
4313 *==========================================================================*/
4314void QCamera3HardwareInterface::unblockRequestIfNecessary()
4315{
4316 // Unblock process_capture_request
4317 pthread_cond_signal(&mRequestCond);
4318}
4319
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004320/*===========================================================================
4321 * FUNCTION : isHdrSnapshotRequest
4322 *
4323 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4324 *
4325 * PARAMETERS : camera3 request structure
4326 *
4327 * RETURN : boolean decision variable
4328 *
4329 *==========================================================================*/
4330bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4331{
4332 if (request == NULL) {
4333 LOGE("Invalid request handle");
4334 assert(0);
4335 return false;
4336 }
4337
4338 if (!mForceHdrSnapshot) {
4339 CameraMetadata frame_settings;
4340 frame_settings = request->settings;
4341
4342 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4343 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4344 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4345 return false;
4346 }
4347 } else {
4348 return false;
4349 }
4350
4351 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4352 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4353 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4354 return false;
4355 }
4356 } else {
4357 return false;
4358 }
4359 }
4360
4361 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4362 if (request->output_buffers[i].stream->format
4363 == HAL_PIXEL_FORMAT_BLOB) {
4364 return true;
4365 }
4366 }
4367
4368 return false;
4369}
4370/*===========================================================================
4371 * FUNCTION : orchestrateRequest
4372 *
4373 * DESCRIPTION: Orchestrates a capture request from camera service
4374 *
4375 * PARAMETERS :
4376 * @request : request from framework to process
4377 *
4378 * RETURN : Error status codes
4379 *
4380 *==========================================================================*/
4381int32_t QCamera3HardwareInterface::orchestrateRequest(
4382 camera3_capture_request_t *request)
4383{
4384
4385 uint32_t originalFrameNumber = request->frame_number;
4386 uint32_t originalOutputCount = request->num_output_buffers;
4387 const camera_metadata_t *original_settings = request->settings;
4388 List<InternalRequest> internallyRequestedStreams;
4389 List<InternalRequest> emptyInternalList;
4390
4391 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4392 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4393 uint32_t internalFrameNumber;
4394 CameraMetadata modified_meta;
4395
4396
4397 /* Add Blob channel to list of internally requested streams */
4398 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4399 if (request->output_buffers[i].stream->format
4400 == HAL_PIXEL_FORMAT_BLOB) {
4401 InternalRequest streamRequested;
4402 streamRequested.meteringOnly = 1;
4403 streamRequested.need_metadata = 0;
4404 streamRequested.stream = request->output_buffers[i].stream;
4405 internallyRequestedStreams.push_back(streamRequested);
4406 }
4407 }
4408 request->num_output_buffers = 0;
4409 auto itr = internallyRequestedStreams.begin();
4410
4411 /* Modify setting to set compensation */
4412        /* Modify settings to set exposure compensation */
4413 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4414 uint8_t aeLock = 1;
4415 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4416 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4417 camera_metadata_t *modified_settings = modified_meta.release();
4418 request->settings = modified_settings;
4419
4420 /* Capture Settling & -2x frame */
4421 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4422 request->frame_number = internalFrameNumber;
4423 processCaptureRequest(request, internallyRequestedStreams);
4424
4425 request->num_output_buffers = originalOutputCount;
4426 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4427 request->frame_number = internalFrameNumber;
4428 processCaptureRequest(request, emptyInternalList);
4429 request->num_output_buffers = 0;
4430
4431 modified_meta = modified_settings;
4432 expCompensation = 0;
4433 aeLock = 1;
4434 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4435 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4436 modified_settings = modified_meta.release();
4437 request->settings = modified_settings;
4438
4439 /* Capture Settling & 0X frame */
4440
4441 itr = internallyRequestedStreams.begin();
4442 if (itr == internallyRequestedStreams.end()) {
4443 LOGE("Error Internally Requested Stream list is empty");
4444 assert(0);
4445 } else {
4446 itr->need_metadata = 0;
4447 itr->meteringOnly = 1;
4448 }
4449
4450 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4451 request->frame_number = internalFrameNumber;
4452 processCaptureRequest(request, internallyRequestedStreams);
4453
4454 itr = internallyRequestedStreams.begin();
4455 if (itr == internallyRequestedStreams.end()) {
4456 ALOGE("Error Internally Requested Stream list is empty");
4457 assert(0);
4458 } else {
4459 itr->need_metadata = 1;
4460 itr->meteringOnly = 0;
4461 }
4462
4463 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4464 request->frame_number = internalFrameNumber;
4465 processCaptureRequest(request, internallyRequestedStreams);
4466
4467 /* Capture 2X frame*/
4468 modified_meta = modified_settings;
4469 expCompensation = GB_HDR_2X_STEP_EV;
4470 aeLock = 1;
4471 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4472 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4473 modified_settings = modified_meta.release();
4474 request->settings = modified_settings;
4475
4476 itr = internallyRequestedStreams.begin();
4477 if (itr == internallyRequestedStreams.end()) {
4478 ALOGE("Error Internally Requested Stream list is empty");
4479 assert(0);
4480 } else {
4481 itr->need_metadata = 0;
4482 itr->meteringOnly = 1;
4483 }
4484 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4485 request->frame_number = internalFrameNumber;
4486 processCaptureRequest(request, internallyRequestedStreams);
4487
4488 itr = internallyRequestedStreams.begin();
4489 if (itr == internallyRequestedStreams.end()) {
4490 ALOGE("Error Internally Requested Stream list is empty");
4491 assert(0);
4492 } else {
4493 itr->need_metadata = 1;
4494 itr->meteringOnly = 0;
4495 }
4496
4497 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4498 request->frame_number = internalFrameNumber;
4499 processCaptureRequest(request, internallyRequestedStreams);
4500
4501
4502 /* Capture 2X on original streaming config*/
4503 internallyRequestedStreams.clear();
4504
4505 /* Restore original settings pointer */
4506 request->settings = original_settings;
4507 } else {
4508 uint32_t internalFrameNumber;
4509 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4510 request->frame_number = internalFrameNumber;
4511 return processCaptureRequest(request, internallyRequestedStreams);
4512 }
4513
4514 return NO_ERROR;
4515}
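
/*===========================================================================
 * Summary of the HDR snapshot fan-out implemented above (EV values refer to
 * the GB_HDR_* constants):
 *
 *   1. AE is locked and exposure compensation set to GB_HDR_HALF_STEP_EV;
 *      a metering-only internal request is issued, followed by a request on
 *      the original output buffers, which is the only one mapped back to
 *      the framework frame number.
 *   2. Exposure compensation is reset to 0; a metering-only internal request
 *      is issued, then an internal blob request with need_metadata set.
 *   3. Exposure compensation is set to GB_HDR_2X_STEP_EV; the same
 *      metering-only / full pair is issued.
 *
 * Every internal frame number except the one allocated in step 1 maps to
 * EMPTY_FRAMEWORK_FRAME_NUMBER, so orchestrateResult()/orchestrateNotify()
 * silently drop those callbacks.
 *==========================================================================*/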
4516
4517/*===========================================================================
4518 * FUNCTION : orchestrateResult
4519 *
4520 * DESCRIPTION: Orchestrates a capture result to camera service
4521 *
4522 * PARAMETERS :
4523 * @result : capture result to be sent to the framework
4524 *
4525 * RETURN :
4526 *
4527 *==========================================================================*/
4528void QCamera3HardwareInterface::orchestrateResult(
4529 camera3_capture_result_t *result)
4530{
4531 uint32_t frameworkFrameNumber;
4532 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4533 frameworkFrameNumber);
4534 if (rc != NO_ERROR) {
4535 LOGE("Cannot find translated frameworkFrameNumber");
4536 assert(0);
4537 } else {
4538 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004539 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004540 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004541 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004542 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4543 camera_metadata_entry_t entry;
4544 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4545 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004546 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004547 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4548 if (ret != OK)
4549 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004550 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004551 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004552 result->frame_number = frameworkFrameNumber;
4553 mCallbackOps->process_capture_result(mCallbackOps, result);
4554 }
4555 }
4556}
4557
4558/*===========================================================================
4559 * FUNCTION : orchestrateNotify
4560 *
4561 * DESCRIPTION: Orchestrates a notify to camera service
4562 *
4563 * PARAMETERS :
4564 * @notify_msg : notify message to be sent to the framework
4565 *
4566 * RETURN :
4567 *
4568 *==========================================================================*/
4569void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4570{
4571 uint32_t frameworkFrameNumber;
4572 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004573 int32_t rc = NO_ERROR;
4574
4575 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004576 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004577
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004578 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004579 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4580 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4581 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004582 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004583 LOGE("Cannot find translated frameworkFrameNumber");
4584 assert(0);
4585 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004586 }
4587 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004588
4589 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4590 LOGD("Internal Request drop the notifyCb");
4591 } else {
4592 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4593 mCallbackOps->notify(mCallbackOps, notify_msg);
4594 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004595}
4596
4597/*===========================================================================
4598 * FUNCTION : FrameNumberRegistry
4599 *
4600 * DESCRIPTION: Constructor
4601 *
4602 * PARAMETERS :
4603 *
4604 * RETURN :
4605 *
4606 *==========================================================================*/
4607FrameNumberRegistry::FrameNumberRegistry()
4608{
4609 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4610}
4611
4612/*===========================================================================
4613 * FUNCTION : ~FrameNumberRegistry
4614 *
4615 * DESCRIPTION: Destructor
4616 *
4617 * PARAMETERS :
4618 *
4619 * RETURN :
4620 *
4621 *==========================================================================*/
4622FrameNumberRegistry::~FrameNumberRegistry()
4623{
4624}
4625
4626/*===========================================================================
4627 * FUNCTION : PurgeOldEntriesLocked
4628 *
4629 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4630 *
4631 * PARAMETERS :
4632 *
4633 * RETURN : NONE
4634 *
4635 *==========================================================================*/
4636void FrameNumberRegistry::purgeOldEntriesLocked()
4637{
4638 while (_register.begin() != _register.end()) {
4639 auto itr = _register.begin();
4640 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4641 _register.erase(itr);
4642 } else {
4643 return;
4644 }
4645 }
4646}
4647
4648/*===========================================================================
4649 * FUNCTION : allocStoreInternalFrameNumber
4650 *
4651 * DESCRIPTION: Method to record a framework request and associate a newly
4652 * generated internal frame number with it
4653 *
4654 * PARAMETERS :
4655 * @fFrameNumber: Identifier given by framework
4656 * @internalFN : Output parameter which will hold the newly generated
4657 * internal frame number
4658 *
4659 * RETURN : Error code
4660 *
4661 *==========================================================================*/
4662int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4663 uint32_t &internalFrameNumber)
4664{
4665 Mutex::Autolock lock(mRegistryLock);
4666 internalFrameNumber = _nextFreeInternalNumber++;
4667 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4668 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4669 purgeOldEntriesLocked();
4670 return NO_ERROR;
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : generateStoreInternalFrameNumber
4675 *
4676 * DESCRIPTION: Method to generate a new internal frame number that is not
4677 * associated with any framework request
4678 *
4679 * PARAMETERS :
4680 * @internalFrame#: Output parameter which will hold the newly generated
4681 * internal frame number
4682 *
4683 * RETURN : Error code
4684 *
4685 *==========================================================================*/
4686int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4687{
4688 Mutex::Autolock lock(mRegistryLock);
4689 internalFrameNumber = _nextFreeInternalNumber++;
4690 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4691 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4692 purgeOldEntriesLocked();
4693 return NO_ERROR;
4694}
4695
4696/*===========================================================================
4697 * FUNCTION : getFrameworkFrameNumber
4698 *
4699 * DESCRIPTION: Method to query the framework frame number given an internal one
4700 *
4701 * PARAMETERS :
4702 * @internalFrame#: Internal reference
4703 * @frameworkframenumber: Output parameter holding framework frame entry
4704 *
4705 * RETURN : Error code
4706 *
4707 *==========================================================================*/
4708int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4709 uint32_t &frameworkFrameNumber)
4710{
4711 Mutex::Autolock lock(mRegistryLock);
4712 auto itr = _register.find(internalFrameNumber);
4713 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004714 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004715 return -ENOENT;
4716 }
4717
4718 frameworkFrameNumber = itr->second;
4719 purgeOldEntriesLocked();
4720 return NO_ERROR;
4721}
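
/*===========================================================================
 * Usage sketch for FrameNumberRegistry (illustrative; the frame numbers are
 * hypothetical):
 *
 *   FrameNumberRegistry db;
 *   uint32_t internalFn, meteringFn, fwkFn;
 *
 *   // Framework request #7 gets a fresh internal number.
 *   db.allocStoreInternalFrameNumber(7, internalFn);
 *
 *   // A purely internal request (e.g. an HDR metering frame) has no
 *   // framework peer and resolves to EMPTY_FRAMEWORK_FRAME_NUMBER later.
 *   db.generateStoreInternalFrameNumber(meteringFn);
 *
 *   // At result/notify time the internal number is translated back.
 *   db.getFrameworkFrameNumber(internalFn, fwkFn);   // fwkFn == 7
 *==========================================================================*/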
Thierry Strudel3d639192016-09-09 11:52:26 -07004722
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004723status_t QCamera3HardwareInterface::fillPbStreamConfig(
4724 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4725 QCamera3Channel *channel, uint32_t streamIndex) {
4726 if (config == nullptr) {
4727 LOGE("%s: config is null", __FUNCTION__);
4728 return BAD_VALUE;
4729 }
4730
4731 if (channel == nullptr) {
4732 LOGE("%s: channel is null", __FUNCTION__);
4733 return BAD_VALUE;
4734 }
4735
4736 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4737 if (stream == nullptr) {
4738 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4739 return NAME_NOT_FOUND;
4740 }
4741
4742 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4743 if (streamInfo == nullptr) {
4744 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4745 return NAME_NOT_FOUND;
4746 }
4747
4748 config->id = pbStreamId;
4749 config->image.width = streamInfo->dim.width;
4750 config->image.height = streamInfo->dim.height;
4751 config->image.padding = 0;
4752 config->image.format = pbStreamFormat;
4753
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004754 uint32_t totalPlaneSize = 0;
4755
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004756 // Fill plane information.
4757 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4758 pbcamera::PlaneConfiguration plane;
4759 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4760 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4761 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004762
4763 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004764 }
4765
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004766 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004767 return OK;
4768}
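
/*===========================================================================
 * Worked example for the padding computation above (all numbers are
 * hypothetical): for a two-plane stream with stride x scanline of
 * 4096 x 768 and 4096 x 384 bytes,
 *
 *   totalPlaneSize = 4096*768 + 4096*384 = 4718592
 *
 * and if the backend reports frame_len = 4720640, then
 *
 *   config->image.padding = 4720640 - 4718592 = 2048 bytes of trailing pad.
 *==========================================================================*/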
4769
Thierry Strudel3d639192016-09-09 11:52:26 -07004770/*===========================================================================
4771 * FUNCTION : processCaptureRequest
4772 *
4773 * DESCRIPTION: process a capture request from camera service
4774 *
4775 * PARAMETERS :
4776 * @request : request from framework to process
4777 *
4778 * RETURN :
4779 *
4780 *==========================================================================*/
4781int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004782 camera3_capture_request_t *request,
4783 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004784{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004785 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004786 int rc = NO_ERROR;
4787 int32_t request_id;
4788 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004789 bool isVidBufRequested = false;
4790 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004791 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004792
4793 pthread_mutex_lock(&mMutex);
4794
4795 // Validate current state
4796 switch (mState) {
4797 case CONFIGURED:
4798 case STARTED:
4799 /* valid state */
4800 break;
4801
4802 case ERROR:
4803 pthread_mutex_unlock(&mMutex);
4804 handleCameraDeviceError();
4805 return -ENODEV;
4806
4807 default:
4808 LOGE("Invalid state %d", mState);
4809 pthread_mutex_unlock(&mMutex);
4810 return -ENODEV;
4811 }
4812
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004813 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 if (rc != NO_ERROR) {
4815 LOGE("incoming request is not valid");
4816 pthread_mutex_unlock(&mMutex);
4817 return rc;
4818 }
4819
4820 meta = request->settings;
4821
4822 // For first capture request, send capture intent, and
4823 // stream on all streams
4824 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004825 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004826 // send an unconfigure to the backend so that the isp
4827 // resources are deallocated
4828 if (!mFirstConfiguration) {
4829 cam_stream_size_info_t stream_config_info;
4830 int32_t hal_version = CAM_HAL_V3;
4831 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4832 stream_config_info.buffer_info.min_buffers =
4833 MIN_INFLIGHT_REQUESTS;
4834 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004835 m_bIs4KVideo ? 0 :
4836 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 clear_metadata_buffer(mParameters);
4838 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4839 CAM_INTF_PARM_HAL_VERSION, hal_version);
4840 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4841 CAM_INTF_META_STREAM_INFO, stream_config_info);
4842 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4843 mParameters);
4844 if (rc < 0) {
4845 LOGE("set_parms for unconfigure failed");
4846 pthread_mutex_unlock(&mMutex);
4847 return rc;
4848 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004849
Thierry Strudel3d639192016-09-09 11:52:26 -07004850 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004851 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004852 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004853 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004855 property_get("persist.camera.is_type", is_type_value, "4");
4856 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4857 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4858 property_get("persist.camera.is_type_preview", is_type_value, "4");
4859 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4860 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004861
4862 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4863 int32_t hal_version = CAM_HAL_V3;
4864 uint8_t captureIntent =
4865 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4866 mCaptureIntent = captureIntent;
4867 clear_metadata_buffer(mParameters);
4868 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4869 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4870 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004871 if (mFirstConfiguration) {
4872 // configure instant AEC
4873 // Instant AEC is a session based parameter and it is needed only
4874 // once per complete session after open camera.
4875 // i.e. This is set only once for the first capture request, after open camera.
4876 setInstantAEC(meta);
4877 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 uint8_t fwkVideoStabMode=0;
4879 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4880 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4881 }
4882
Xue Tuecac74e2017-04-17 13:58:15 -07004883        // If the EIS setprop is enabled, turn EIS on only for video/preview
4884 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004885 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004886 int32_t vsMode;
4887 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4889 rc = BAD_VALUE;
4890 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004891 LOGD("setEis %d", setEis);
4892 bool eis3Supported = false;
4893 size_t count = IS_TYPE_MAX;
4894 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4895 for (size_t i = 0; i < count; i++) {
4896 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4897 eis3Supported = true;
4898 break;
4899 }
4900 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004901
4902 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004903 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4905 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004906 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4907 is_type = isTypePreview;
4908 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4909 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4910 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004911 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 } else {
4913 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 } else {
4916 is_type = IS_TYPE_NONE;
4917 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004918 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004919 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4921 }
4922 }
4923
4924 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4925 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4926
Thierry Strudel54dc9782017-02-15 12:12:10 -08004927 //Disable tintless only if the property is set to 0
4928 memset(prop, 0, sizeof(prop));
4929 property_get("persist.camera.tintless.enable", prop, "1");
4930 int32_t tintless_value = atoi(prop);
4931
Thierry Strudel3d639192016-09-09 11:52:26 -07004932 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4933 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004934
Thierry Strudel3d639192016-09-09 11:52:26 -07004935 //Disable CDS for HFR mode or if DIS/EIS is on.
4936 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4937 //after every configure_stream
4938 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4939 (m_bIsVideo)) {
4940 int32_t cds = CAM_CDS_MODE_OFF;
4941 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4942 CAM_INTF_PARM_CDS_MODE, cds))
4943 LOGE("Failed to disable CDS for HFR mode");
4944
4945 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004946
4947 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4948 uint8_t* use_av_timer = NULL;
4949
4950 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004951 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004952 use_av_timer = &m_debug_avtimer;
4953 }
4954 else{
4955 use_av_timer =
4956 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004957 if (use_av_timer) {
4958 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4959 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004960 }
4961
4962 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4963 rc = BAD_VALUE;
4964 }
4965 }
4966
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 setMobicat();
4968
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004969 uint8_t nrMode = 0;
4970 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4971 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4972 }
4973
Thierry Strudel3d639192016-09-09 11:52:26 -07004974 /* Set fps and hfr mode while sending meta stream info so that sensor
4975 * can configure appropriate streaming mode */
4976 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4978 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004979 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4980 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004981 if (rc == NO_ERROR) {
4982 int32_t max_fps =
4983 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004984 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004985 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4986 }
4987 /* For HFR, more buffers are dequeued upfront to improve the performance */
4988 if (mBatchSize) {
4989 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4990 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4991 }
4992 }
4993 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004994 LOGE("setHalFpsRange failed");
4995 }
4996 }
4997 if (meta.exists(ANDROID_CONTROL_MODE)) {
4998 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4999 rc = extractSceneMode(meta, metaMode, mParameters);
5000 if (rc != NO_ERROR) {
5001 LOGE("extractSceneMode failed");
5002 }
5003 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005004 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005005
Thierry Strudel04e026f2016-10-10 11:27:36 -07005006 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5007 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5008 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5009 rc = setVideoHdrMode(mParameters, vhdr);
5010 if (rc != NO_ERROR) {
5011 LOGE("setVideoHDR is failed");
5012 }
5013 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005014
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005015 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005016 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005017 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005018 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5020 sensorModeFullFov)) {
5021 rc = BAD_VALUE;
5022 }
5023 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005024 //TODO: validate the arguments, HSV scenemode should have only the
5025 //advertised fps ranges
5026
5027 /*set the capture intent, hal version, tintless, stream info,
5028         *and DIS enable parameters to the backend*/
5029 LOGD("set_parms META_STREAM_INFO " );
5030 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005031 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5032 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 mStreamConfigInfo.type[i],
5034 mStreamConfigInfo.stream_sizes[i].width,
5035 mStreamConfigInfo.stream_sizes[i].height,
5036 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005037 mStreamConfigInfo.format[i],
5038 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005039 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005040
Thierry Strudel3d639192016-09-09 11:52:26 -07005041 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5042 mParameters);
5043 if (rc < 0) {
5044 LOGE("set_parms failed for hal version, stream info");
5045 }
5046
Chien-Yu Chenee335912017-02-09 17:53:20 -08005047 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5048 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005049 if (rc != NO_ERROR) {
5050 LOGE("Failed to get sensor output size");
5051 pthread_mutex_unlock(&mMutex);
5052 goto error_exit;
5053 }
5054
5055 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5056 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005057 mSensorModeInfo.active_array_size.width,
5058 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005059
5060 /* Set batchmode before initializing channel. Since registerBuffer
5061 * internally initializes some of the channels, better set batchmode
5062 * even before first register buffer */
5063 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5064 it != mStreamInfo.end(); it++) {
5065 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5066 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5067 && mBatchSize) {
5068 rc = channel->setBatchSize(mBatchSize);
5069 //Disable per frame map unmap for HFR/batchmode case
5070 rc |= channel->setPerFrameMapUnmap(false);
5071 if (NO_ERROR != rc) {
5072 LOGE("Channel init failed %d", rc);
5073 pthread_mutex_unlock(&mMutex);
5074 goto error_exit;
5075 }
5076 }
5077 }
5078
5079 //First initialize all streams
5080 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5081 it != mStreamInfo.end(); it++) {
5082 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005083
5084 /* Initial value of NR mode is needed before stream on */
5085 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005086 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5087 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005088 setEis) {
5089 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5090 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5091 is_type = mStreamConfigInfo.is_type[i];
5092 break;
5093 }
5094 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005096 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005097 rc = channel->initialize(IS_TYPE_NONE);
5098 }
5099 if (NO_ERROR != rc) {
5100 LOGE("Channel initialization failed %d", rc);
5101 pthread_mutex_unlock(&mMutex);
5102 goto error_exit;
5103 }
5104 }
5105
5106 if (mRawDumpChannel) {
5107 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5108 if (rc != NO_ERROR) {
5109 LOGE("Error: Raw Dump Channel init failed");
5110 pthread_mutex_unlock(&mMutex);
5111 goto error_exit;
5112 }
5113 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005114 if (mHdrPlusRawSrcChannel) {
5115 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5116 if (rc != NO_ERROR) {
5117 LOGE("Error: HDR+ RAW Source Channel init failed");
5118 pthread_mutex_unlock(&mMutex);
5119 goto error_exit;
5120 }
5121 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 if (mSupportChannel) {
5123 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5124 if (rc < 0) {
5125 LOGE("Support channel initialization failed");
5126 pthread_mutex_unlock(&mMutex);
5127 goto error_exit;
5128 }
5129 }
5130 if (mAnalysisChannel) {
5131 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5132 if (rc < 0) {
5133 LOGE("Analysis channel initialization failed");
5134 pthread_mutex_unlock(&mMutex);
5135 goto error_exit;
5136 }
5137 }
5138 if (mDummyBatchChannel) {
5139 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5140 if (rc < 0) {
5141 LOGE("mDummyBatchChannel setBatchSize failed");
5142 pthread_mutex_unlock(&mMutex);
5143 goto error_exit;
5144 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005145 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005146 if (rc < 0) {
5147 LOGE("mDummyBatchChannel initialization failed");
5148 pthread_mutex_unlock(&mMutex);
5149 goto error_exit;
5150 }
5151 }
5152
5153 // Set bundle info
5154 rc = setBundleInfo();
5155 if (rc < 0) {
5156 LOGE("setBundleInfo failed %d", rc);
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160
5161 //update settings from app here
5162 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5163 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5164 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5165 }
5166 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5167 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5168 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5169 }
5170 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5171 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5172 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5173
5174 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5175 (mLinkedCameraId != mCameraId) ) {
5176 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5177 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005178 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005179 goto error_exit;
5180 }
5181 }
5182
5183 // add bundle related cameras
5184 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5185 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005186 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5187 &m_pDualCamCmdPtr->bundle_info;
5188 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005189 if (mIsDeviceLinked)
5190 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5191 else
5192 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5193
5194 pthread_mutex_lock(&gCamLock);
5195
5196 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5197 LOGE("Dualcam: Invalid Session Id ");
5198 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005199 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 goto error_exit;
5201 }
5202
5203 if (mIsMainCamera == 1) {
5204 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5205 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005206 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005207 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 // related session id should be session id of linked session
5209 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5210 } else {
5211 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5212 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005213 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005214 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005215 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5216 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005217 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 pthread_mutex_unlock(&gCamLock);
5219
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005220 rc = mCameraHandle->ops->set_dual_cam_cmd(
5221 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005222 if (rc < 0) {
5223 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 goto error_exit;
5226 }
5227 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 goto no_error;
5229error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005230 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 return rc;
5232no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005233 mWokenUpByDaemon = false;
5234 mPendingLiveRequest = 0;
5235 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005236 }
5237
5238 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005239 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005240
5241 if (mFlushPerf) {
5242 //we cannot accept any requests during flush
5243 LOGE("process_capture_request cannot proceed during flush");
5244 pthread_mutex_unlock(&mMutex);
5245 return NO_ERROR; //should return an error
5246 }
5247
5248 if (meta.exists(ANDROID_REQUEST_ID)) {
5249 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5250 mCurrentRequestId = request_id;
5251 LOGD("Received request with id: %d", request_id);
5252 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5253 LOGE("Unable to find request id field, \
5254 & no previous id available");
5255 pthread_mutex_unlock(&mMutex);
5256 return NAME_NOT_FOUND;
5257 } else {
5258 LOGD("Re-using old request id");
5259 request_id = mCurrentRequestId;
5260 }
5261
5262 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5263 request->num_output_buffers,
5264 request->input_buffer,
5265 frameNumber);
5266 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005267 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005269 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005270 uint32_t snapshotStreamId = 0;
5271 for (size_t i = 0; i < request->num_output_buffers; i++) {
5272 const camera3_stream_buffer_t& output = request->output_buffers[i];
5273 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5274
Emilian Peev7650c122017-01-19 08:24:33 -08005275 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5276 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005277 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005278 blob_request = 1;
5279 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5280 }
5281
5282 if (output.acquire_fence != -1) {
5283 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5284 close(output.acquire_fence);
5285 if (rc != OK) {
5286 LOGE("sync wait failed %d", rc);
5287 pthread_mutex_unlock(&mMutex);
5288 return rc;
5289 }
5290 }
5291
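 // Depth (HAL_DATASPACE_DEPTH blob) buffers are serviced by the depth channel,
 // so they are not added to the backend stream request list.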
Emilian Peev0f3c3162017-03-15 12:57:46 +00005292 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5293 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005294 depthRequestPresent = true;
5295 continue;
5296 }
5297
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005298 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005299 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005300
5301 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5302 isVidBufRequested = true;
5303 }
5304 }
5305
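 // Also add any HAL-internal streams (internallyRequestedStreams) to the
 // backend stream list for this request.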
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005306 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5307 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5308 itr++) {
5309 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5310 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5311 channel->getStreamID(channel->getStreamTypeMask());
5312
5313 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5314 isVidBufRequested = true;
5315 }
5316 }
5317
Thierry Strudel3d639192016-09-09 11:52:26 -07005318 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005319 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005320 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005321 }
5322 if (blob_request && mRawDumpChannel) {
5323 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005324 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005325 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005326 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 }
5328
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005329 {
5330 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5331 // Request a RAW buffer if
5332 // 1. mHdrPlusRawSrcChannel is valid.
5333 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5334 // 3. There is no pending HDR+ request.
5335 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5336 mHdrPlusPendingRequests.size() == 0) {
5337 streamsArray.stream_request[streamsArray.num_streams].streamID =
5338 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5339 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5340 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005341 }
5342
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005343 //extract capture intent
5344 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5345 mCaptureIntent =
5346 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5347 }
5348
5349 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5350 mCacMode =
5351 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5352 }
5353
5354 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005355 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005356
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005357 {
5358 Mutex::Autolock l(gHdrPlusClientLock);
5359 // If this request has a still capture intent, try to submit an HDR+ request.
5360 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5361 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5362 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5363 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005364 }
5365
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005366 if (hdrPlusRequest) {
5367 // For a HDR+ request, just set the frame parameters.
5368 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5369 if (rc < 0) {
5370 LOGE("fail to set frame parameters");
5371 pthread_mutex_unlock(&mMutex);
5372 return rc;
5373 }
5374 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005375 /* Parse the settings:
5376 * - For every request in NORMAL MODE
5377 * - For every request in HFR mode during preview only case
5378 * - For first request of every batch in HFR mode during video
5379 * recording. In batchmode the same settings except frame number is
5380 * repeated in each request of the batch.
5381 */
5382 if (!mBatchSize ||
5383 (mBatchSize && !isVidBufRequested) ||
5384 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005385 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005386 if (rc < 0) {
5387 LOGE("fail to set frame parameters");
5388 pthread_mutex_unlock(&mMutex);
5389 return rc;
5390 }
5391 }
5392 /* For batchMode HFR, setFrameParameters is not called for every
5393 * request. But only frame number of the latest request is parsed.
5394 * Keep track of first and last frame numbers in a batch so that
5395 * metadata for the frame numbers of batch can be duplicated in
5396 * handleBatchMetadata */
5397 if (mBatchSize) {
5398 if (!mToBeQueuedVidBufs) {
5399 //start of the batch
5400 mFirstFrameNumberInBatch = request->frame_number;
5401 }
5402 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5403 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5404 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005405 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 return BAD_VALUE;
5407 }
5408 }
5409 if (mNeedSensorRestart) {
5410 /* Unlock the mutex as restartSensor waits on the channels to be
5411 * stopped, which in turn calls stream callback functions -
5412 * handleBufferWithLock and handleMetadataWithLock */
5413 pthread_mutex_unlock(&mMutex);
5414 rc = dynamicUpdateMetaStreamInfo();
5415 if (rc != NO_ERROR) {
5416 LOGE("Restarting the sensor failed");
5417 return BAD_VALUE;
5418 }
5419 mNeedSensorRestart = false;
5420 pthread_mutex_lock(&mMutex);
5421 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005422 if(mResetInstantAEC) {
5423 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5424 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5425 mResetInstantAEC = false;
5426 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005427 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005428 if (request->input_buffer->acquire_fence != -1) {
5429 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5430 close(request->input_buffer->acquire_fence);
5431 if (rc != OK) {
5432 LOGE("input buffer sync wait failed %d", rc);
5433 pthread_mutex_unlock(&mMutex);
5434 return rc;
5435 }
5436 }
5437 }
5438
5439 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5440 mLastCustIntentFrmNum = frameNumber;
5441 }
5442 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005443 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005444 pendingRequestIterator latestRequest;
5445 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005446 pendingRequest.num_buffers = depthRequestPresent ?
5447 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005448 pendingRequest.request_id = request_id;
5449 pendingRequest.blob_request = blob_request;
5450 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005451 if (request->input_buffer) {
5452 pendingRequest.input_buffer =
5453 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5454 *(pendingRequest.input_buffer) = *(request->input_buffer);
5455 pInputBuffer = pendingRequest.input_buffer;
5456 } else {
5457 pendingRequest.input_buffer = NULL;
5458 pInputBuffer = NULL;
5459 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005460 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005461
5462 pendingRequest.pipeline_depth = 0;
5463 pendingRequest.partial_result_cnt = 0;
5464 extractJpegMetadata(mCurJpegMeta, request);
5465 pendingRequest.jpegMetadata = mCurJpegMeta;
5466 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5467 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005468 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005469 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5470 mHybridAeEnable =
5471 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5472 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005473
5474 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5475 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005476 /* DevCamDebug metadata processCaptureRequest */
5477 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5478 mDevCamDebugMetaEnable =
5479 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5480 }
5481 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5482 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005483
5484 //extract CAC info
5485 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5486 mCacMode =
5487 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5488 }
5489 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005490 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005491
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005492 // extract enableZsl info
5493 if (gExposeEnableZslKey) {
5494 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5495 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5496 mZslEnabled = pendingRequest.enableZsl;
5497 } else {
5498 pendingRequest.enableZsl = mZslEnabled;
5499 }
5500 }
5501
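 // Track the output buffers of this request so results can be matched to them
 // and so they can be returned, or flagged as errors, on flush or device error.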
Thierry Strudel3d639192016-09-09 11:52:26 -07005502 PendingBuffersInRequest bufsForCurRequest;
5503 bufsForCurRequest.frame_number = frameNumber;
5504 // Mark current timestamp for the new request
5505 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005506 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005507
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005508 if (hdrPlusRequest) {
5509 // Save settings for this request.
5510 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5511 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5512
5513 // Add to pending HDR+ request queue.
5514 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5515 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5516
5517 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5518 }
5519
Thierry Strudel3d639192016-09-09 11:52:26 -07005520 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005521 if ((request->output_buffers[i].stream->data_space ==
5522 HAL_DATASPACE_DEPTH) &&
5523 (HAL_PIXEL_FORMAT_BLOB ==
5524 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005525 continue;
5526 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005527 RequestedBufferInfo requestedBuf;
5528 memset(&requestedBuf, 0, sizeof(requestedBuf));
5529 requestedBuf.stream = request->output_buffers[i].stream;
5530 requestedBuf.buffer = NULL;
5531 pendingRequest.buffers.push_back(requestedBuf);
5532
5533 // Add to buffer handle the pending buffers list
5534 PendingBufferInfo bufferInfo;
5535 bufferInfo.buffer = request->output_buffers[i].buffer;
5536 bufferInfo.stream = request->output_buffers[i].stream;
5537 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5538 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5539 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5540 frameNumber, bufferInfo.buffer,
5541 channel->getStreamTypeMask(), bufferInfo.stream->format);
5542 }
5543 // Add this request packet into mPendingBuffersMap
5544 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5545 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5546 mPendingBuffersMap.get_num_overall_buffers());
5547
5548 latestRequest = mPendingRequestsList.insert(
5549 mPendingRequestsList.end(), pendingRequest);
5550 if(mFlush) {
5551 LOGI("mFlush is true");
5552 pthread_mutex_unlock(&mMutex);
5553 return NO_ERROR;
5554 }
5555
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005556 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5557 // channel.
5558 if (!hdrPlusRequest) {
5559 int indexUsed;
5560 // Notify metadata channel we receive a request
5561 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005562
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005563 if(request->input_buffer != NULL){
5564 LOGD("Input request, frame_number %d", frameNumber);
5565 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5566 if (NO_ERROR != rc) {
5567 LOGE("fail to set reproc parameters");
5568 pthread_mutex_unlock(&mMutex);
5569 return rc;
5570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005571 }
5572
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573 // Call request on other streams
5574 uint32_t streams_need_metadata = 0;
5575 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5576 for (size_t i = 0; i < request->num_output_buffers; i++) {
5577 const camera3_stream_buffer_t& output = request->output_buffers[i];
5578 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5579
5580 if (channel == NULL) {
5581 LOGW("invalid channel pointer for stream");
5582 continue;
5583 }
5584
5585 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5586 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5587 output.buffer, request->input_buffer, frameNumber);
5588 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005589 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005590 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5591 if (rc < 0) {
5592 LOGE("Fail to request on picture channel");
5593 pthread_mutex_unlock(&mMutex);
5594 return rc;
5595 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005596 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005597 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5598 assert(NULL != mDepthChannel);
5599 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005600
Emilian Peev7650c122017-01-19 08:24:33 -08005601 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5602 if (rc < 0) {
5603 LOGE("Fail to map on depth buffer");
5604 pthread_mutex_unlock(&mMutex);
5605 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005606 }
Emilian Peev7650c122017-01-19 08:24:33 -08005607 } else {
5608 LOGD("snapshot request with buffer %p, frame_number %d",
5609 output.buffer, frameNumber);
5610 if (!request->settings) {
5611 rc = channel->request(output.buffer, frameNumber,
5612 NULL, mPrevParameters, indexUsed);
5613 } else {
5614 rc = channel->request(output.buffer, frameNumber,
5615 NULL, mParameters, indexUsed);
5616 }
5617 if (rc < 0) {
5618 LOGE("Fail to request on picture channel");
5619 pthread_mutex_unlock(&mMutex);
5620 return rc;
5621 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005622
Emilian Peev7650c122017-01-19 08:24:33 -08005623 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5624 uint32_t j = 0;
5625 for (j = 0; j < streamsArray.num_streams; j++) {
5626 if (streamsArray.stream_request[j].streamID == streamId) {
5627 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5628 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5629 else
5630 streamsArray.stream_request[j].buf_index = indexUsed;
5631 break;
5632 }
5633 }
5634 if (j == streamsArray.num_streams) {
5635 LOGE("Did not find matching stream to update index");
5636 assert(0);
5637 }
5638
5639 pendingBufferIter->need_metadata = true;
5640 streams_need_metadata++;
5641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005642 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005643 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5644 bool needMetadata = false;
5645 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5646 rc = yuvChannel->request(output.buffer, frameNumber,
5647 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5648 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005649 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005650 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005651 pthread_mutex_unlock(&mMutex);
5652 return rc;
5653 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005654
5655 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5656 uint32_t j = 0;
5657 for (j = 0; j < streamsArray.num_streams; j++) {
5658 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005659 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5660 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5661 else
5662 streamsArray.stream_request[j].buf_index = indexUsed;
5663 break;
5664 }
5665 }
5666 if (j == streamsArray.num_streams) {
5667 LOGE("Did not find matching stream to update index");
5668 assert(0);
5669 }
5670
5671 pendingBufferIter->need_metadata = needMetadata;
5672 if (needMetadata)
5673 streams_need_metadata += 1;
5674 LOGD("calling YUV channel request, need_metadata is %d",
5675 needMetadata);
5676 } else {
5677 LOGD("request with buffer %p, frame_number %d",
5678 output.buffer, frameNumber);
5679
5680 rc = channel->request(output.buffer, frameNumber, indexUsed);
5681
5682 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5683 uint32_t j = 0;
5684 for (j = 0; j < streamsArray.num_streams; j++) {
5685 if (streamsArray.stream_request[j].streamID == streamId) {
5686 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5687 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5688 else
5689 streamsArray.stream_request[j].buf_index = indexUsed;
5690 break;
5691 }
5692 }
5693 if (j == streamsArray.num_streams) {
5694 LOGE("Did not find matching stream to update index");
5695 assert(0);
5696 }
5697
5698 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5699 && mBatchSize) {
5700 mToBeQueuedVidBufs++;
5701 if (mToBeQueuedVidBufs == mBatchSize) {
5702 channel->queueBatchBuf();
5703 }
5704 }
5705 if (rc < 0) {
5706 LOGE("request failed");
5707 pthread_mutex_unlock(&mMutex);
5708 return rc;
5709 }
5710 }
5711 pendingBufferIter++;
5712 }
5713
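 // Submit the HAL-internal stream requests (e.g. metering-only captures);
 // these carry no framework output buffer, so request() is called with a
 // NULL buffer.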
5714 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5715 itr++) {
5716 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5717
5718 if (channel == NULL) {
5719 LOGE("invalid channel pointer for stream");
5720 assert(0);
5721 return BAD_VALUE;
5722 }
5723
5724 InternalRequest requestedStream;
5725 requestedStream = (*itr);
5726
5727
5728 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5729 LOGD("snapshot request internally input buffer %p, frame_number %d",
5730 request->input_buffer, frameNumber);
5731 if(request->input_buffer != NULL){
5732 rc = channel->request(NULL, frameNumber,
5733 pInputBuffer, &mReprocMeta, indexUsed, true,
5734 requestedStream.meteringOnly);
5735 if (rc < 0) {
5736 LOGE("Fail to request on picture channel");
5737 pthread_mutex_unlock(&mMutex);
5738 return rc;
5739 }
5740 } else {
5741 LOGD("snapshot request with frame_number %d", frameNumber);
5742 if (!request->settings) {
5743 rc = channel->request(NULL, frameNumber,
5744 NULL, mPrevParameters, indexUsed, true,
5745 requestedStream.meteringOnly);
5746 } else {
5747 rc = channel->request(NULL, frameNumber,
5748 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5749 }
5750 if (rc < 0) {
5751 LOGE("Fail to request on picture channel");
5752 pthread_mutex_unlock(&mMutex);
5753 return rc;
5754 }
5755
5756 if ((*itr).meteringOnly != 1) {
5757 requestedStream.need_metadata = 1;
5758 streams_need_metadata++;
5759 }
5760 }
5761
5762 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5763 uint32_t j = 0;
5764 for (j = 0; j < streamsArray.num_streams; j++) {
5765 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005766 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5767 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5768 else
5769 streamsArray.stream_request[j].buf_index = indexUsed;
5770 break;
5771 }
5772 }
5773 if (j == streamsArray.num_streams) {
5774 LOGE("Did not find matching stream to update index");
5775 assert(0);
5776 }
5777
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005778 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005779 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005780 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005781 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005782 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005783 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005784 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005785
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005786 //If 2 streams have need_metadata set to true, fail the request, unless
5787 //we copy/reference count the metadata buffer
5788 if (streams_need_metadata > 1) {
5789 LOGE("not supporting request in which two streams require"
5790 " 2 HAL metadata for reprocessing");
5791 pthread_mutex_unlock(&mMutex);
5792 return -EINVAL;
5793 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005794
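 // Enable PDAF data in the backend only when this request includes a depth
 // stream.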
Emilian Peev7650c122017-01-19 08:24:33 -08005795 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5797 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5798 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5799 pthread_mutex_unlock(&mMutex);
5800 return BAD_VALUE;
5801 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005802 if (request->input_buffer == NULL) {
5803 /* Set the parameters to backend:
5804 * - For every request in NORMAL MODE
5805 * - For every request in HFR mode during preview only case
5806 * - Once every batch in HFR mode during video recording
5807 */
5808 if (!mBatchSize ||
5809 (mBatchSize && !isVidBufRequested) ||
5810 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5811 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5812 mBatchSize, isVidBufRequested,
5813 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005814
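 // Merge the stream IDs collected over the whole HFR batch so that a single
 // set_parms call covers every stream requested within the batch.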
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005815 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5816 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5817 uint32_t m = 0;
5818 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5819 if (streamsArray.stream_request[k].streamID ==
5820 mBatchedStreamsArray.stream_request[m].streamID)
5821 break;
5822 }
5823 if (m == mBatchedStreamsArray.num_streams) {
5824 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5825 streamID = streamsArray.stream_request[k].streamID;
5826 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5827 buf_index = streamsArray.stream_request[k].buf_index;
5830 mBatchedStreamsArray.num_streams =
5831 mBatchedStreamsArray.num_streams + 1;
5832 }
5833 }
5834 streamsArray = mBatchedStreamsArray;
5835 }
5836 /* Update stream id of all the requested buffers */
5837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5838 streamsArray)) {
5839 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005840 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005841 return BAD_VALUE;
5842 }
5843
5844 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5845 mParameters);
5846 if (rc < 0) {
5847 LOGE("set_parms failed");
5848 }
5849 /* reset to zero because the batch has been queued */
5850 mToBeQueuedVidBufs = 0;
5851 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5852 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5853 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005854 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5855 uint32_t m = 0;
5856 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5857 if (streamsArray.stream_request[k].streamID ==
5858 mBatchedStreamsArray.stream_request[m].streamID)
5859 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005860 }
5861 if (m == mBatchedStreamsArray.num_streams) {
5862 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5863 streamID = streamsArray.stream_request[k].streamID;
5864 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5865 buf_index = streamsArray.stream_request[k].buf_index;
5866 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5867 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005868 }
5869 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005870 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005871
5872 // Start all streams after the first setting is sent, so that the
5873 // setting can be applied sooner: (0 + apply_delay)th frame.
5874 if (mState == CONFIGURED && mChannelHandle) {
5875 //Then start them.
5876 LOGH("Start META Channel");
5877 rc = mMetadataChannel->start();
5878 if (rc < 0) {
5879 LOGE("META channel start failed");
5880 pthread_mutex_unlock(&mMutex);
5881 return rc;
5882 }
5883
5884 if (mAnalysisChannel) {
5885 rc = mAnalysisChannel->start();
5886 if (rc < 0) {
5887 LOGE("Analysis channel start failed");
5888 mMetadataChannel->stop();
5889 pthread_mutex_unlock(&mMutex);
5890 return rc;
5891 }
5892 }
5893
5894 if (mSupportChannel) {
5895 rc = mSupportChannel->start();
5896 if (rc < 0) {
5897 LOGE("Support channel start failed");
5898 mMetadataChannel->stop();
5899 /* Although support and analysis are mutually exclusive today,
5900 stop the analysis channel in any case for future-proofing */
5901 if (mAnalysisChannel) {
5902 mAnalysisChannel->stop();
5903 }
5904 pthread_mutex_unlock(&mMutex);
5905 return rc;
5906 }
5907 }
5908 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5909 it != mStreamInfo.end(); it++) {
5910 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5911 LOGH("Start Processing Channel mask=%d",
5912 channel->getStreamTypeMask());
5913 rc = channel->start();
5914 if (rc < 0) {
5915 LOGE("channel start failed");
5916 pthread_mutex_unlock(&mMutex);
5917 return rc;
5918 }
5919 }
5920
5921 if (mRawDumpChannel) {
5922 LOGD("Starting raw dump stream");
5923 rc = mRawDumpChannel->start();
5924 if (rc != NO_ERROR) {
5925 LOGE("Error Starting Raw Dump Channel");
5926 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5927 it != mStreamInfo.end(); it++) {
5928 QCamera3Channel *channel =
5929 (QCamera3Channel *)(*it)->stream->priv;
5930 LOGH("Stopping Processing Channel mask=%d",
5931 channel->getStreamTypeMask());
5932 channel->stop();
5933 }
5934 if (mSupportChannel)
5935 mSupportChannel->stop();
5936 if (mAnalysisChannel) {
5937 mAnalysisChannel->stop();
5938 }
5939 mMetadataChannel->stop();
5940 pthread_mutex_unlock(&mMutex);
5941 return rc;
5942 }
5943 }
5944
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005945 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005946 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005947 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005948 if (rc != NO_ERROR) {
5949 LOGE("start_channel failed %d", rc);
5950 pthread_mutex_unlock(&mMutex);
5951 return rc;
5952 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005953
5954 {
5955 // Configure Easel for stream on.
5956 Mutex::Autolock l(gHdrPlusClientLock);
5957 if (EaselManagerClientOpened) {
5958 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5959 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5960 if (rc != OK) {
5961 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5962 mCameraId, mSensorModeInfo.op_pixel_clk);
5963 pthread_mutex_unlock(&mMutex);
5964 return rc;
5965 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005966 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005967 }
5968 }
5969
5970 // Start sensor streaming.
5971 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5972 mChannelHandle);
5973 if (rc != NO_ERROR) {
5974 LOGE("start_sensor_stream_on failed %d", rc);
5975 pthread_mutex_unlock(&mMutex);
5976 return rc;
5977 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005978 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005979 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005980 }
5981
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005982 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5983 {
5984 Mutex::Autolock l(gHdrPlusClientLock);
5985 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5986 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5987 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5988 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5989 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5990 rc = enableHdrPlusModeLocked();
5991 if (rc != OK) {
5992 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5993 pthread_mutex_unlock(&mMutex);
5994 return rc;
5995 }
5996
5997 mFirstPreviewIntentSeen = true;
5998 }
5999 }
6000
Thierry Strudel3d639192016-09-09 11:52:26 -07006001 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6002
6003 mState = STARTED;
6004 // Use a timed condition wait so a stalled pipeline cannot block process_capture_request indefinitely
6005 struct timespec ts;
6006 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006007 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006008 if (rc < 0) {
6009 isValidTimeout = 0;
6010 LOGE("Error reading the real time clock!!");
6011 }
6012 else {
6013 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006014 int64_t timeout = 5;
6015 {
6016 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6017 // If there is a pending HDR+ request, the following requests may be blocked until the
6018 // HDR+ request is done. So allow a longer timeout.
6019 if (mHdrPlusPendingRequests.size() > 0) {
6020 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6021 }
6022 }
6023 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006024 }
6025 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006026 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006027 (mState != ERROR) && (mState != DEINIT)) {
6028 if (!isValidTimeout) {
6029 LOGD("Blocking on conditional wait");
6030 pthread_cond_wait(&mRequestCond, &mMutex);
6031 }
6032 else {
6033 LOGD("Blocking on timed conditional wait");
6034 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6035 if (rc == ETIMEDOUT) {
6036 rc = -ENODEV;
6037 LOGE("Unblocked on timeout!!!!");
6038 break;
6039 }
6040 }
6041 LOGD("Unblocked");
6042 if (mWokenUpByDaemon) {
6043 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006044 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006045 break;
6046 }
6047 }
6048 pthread_mutex_unlock(&mMutex);
6049
6050 return rc;
6051}
6052
6053/*===========================================================================
6054 * FUNCTION : dump
6055 *
6056 * DESCRIPTION:
6057 *
6058 * PARAMETERS :
6059 *
6060 *
6061 * RETURN :
6062 *==========================================================================*/
6063void QCamera3HardwareInterface::dump(int fd)
6064{
6065 pthread_mutex_lock(&mMutex);
6066 dprintf(fd, "\n Camera HAL3 information Begin \n");
6067
6068 dprintf(fd, "\nNumber of pending requests: %zu \n",
6069 mPendingRequestsList.size());
6070 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6071 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6072 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6073 for(pendingRequestIterator i = mPendingRequestsList.begin();
6074 i != mPendingRequestsList.end(); i++) {
6075 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6076 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6077 i->input_buffer);
6078 }
6079 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6080 mPendingBuffersMap.get_num_overall_buffers());
6081 dprintf(fd, "-------+------------------\n");
6082 dprintf(fd, " Frame | Stream type mask \n");
6083 dprintf(fd, "-------+------------------\n");
6084 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6085 for(auto &j : req.mPendingBufferList) {
6086 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6087 dprintf(fd, " %5d | %11d \n",
6088 req.frame_number, channel->getStreamTypeMask());
6089 }
6090 }
6091 dprintf(fd, "-------+------------------\n");
6092
6093 dprintf(fd, "\nPending frame drop list: %zu\n",
6094 mPendingFrameDropList.size());
6095 dprintf(fd, "-------+-----------\n");
6096 dprintf(fd, " Frame | Stream ID \n");
6097 dprintf(fd, "-------+-----------\n");
6098 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6099 i != mPendingFrameDropList.end(); i++) {
6100 dprintf(fd, " %5d | %9d \n",
6101 i->frame_number, i->stream_ID);
6102 }
6103 dprintf(fd, "-------+-----------\n");
6104
6105 dprintf(fd, "\n Camera HAL3 information End \n");
6106
6107 /* use dumpsys media.camera as trigger to send update debug level event */
6108 mUpdateDebugLevel = true;
6109 pthread_mutex_unlock(&mMutex);
6110 return;
6111}
6112
6113/*===========================================================================
6114 * FUNCTION : flush
6115 *
6116 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6117 * conditionally restarts channels
6118 *
6119 * PARAMETERS :
6120 * @ restartChannels: re-start all channels
6121 *
6122 *
6123 * RETURN :
6124 * 0 on success
6125 * Error code on failure
6126 *==========================================================================*/
6127int QCamera3HardwareInterface::flush(bool restartChannels)
6128{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006129 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006130 int32_t rc = NO_ERROR;
6131
6132 LOGD("Unblocking Process Capture Request");
6133 pthread_mutex_lock(&mMutex);
6134 mFlush = true;
6135 pthread_mutex_unlock(&mMutex);
6136
6137 rc = stopAllChannels();
6138 // unlink of dualcam
6139 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006140 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6141 &m_pDualCamCmdPtr->bundle_info;
6142 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006143 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6144 pthread_mutex_lock(&gCamLock);
6145
6146 if (mIsMainCamera == 1) {
6147 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6148 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006149 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006150 // related session id should be session id of linked session
6151 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6152 } else {
6153 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6154 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006155 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006156 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6157 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006158 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006159 pthread_mutex_unlock(&gCamLock);
6160
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006161 rc = mCameraHandle->ops->set_dual_cam_cmd(
6162 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006163 if (rc < 0) {
6164 LOGE("Dualcam: Unlink failed, but still proceed to close");
6165 }
6166 }
6167
6168 if (rc < 0) {
6169 LOGE("stopAllChannels failed");
6170 return rc;
6171 }
6172 if (mChannelHandle) {
6173 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6174 mChannelHandle);
6175 }
6176
6177 // Reset bundle info
6178 rc = setBundleInfo();
6179 if (rc < 0) {
6180 LOGE("setBundleInfo failed %d", rc);
6181 return rc;
6182 }
6183
6184 // Mutex Lock
6185 pthread_mutex_lock(&mMutex);
6186
6187 // Unblock process_capture_request
6188 mPendingLiveRequest = 0;
6189 pthread_cond_signal(&mRequestCond);
6190
6191 rc = notifyErrorForPendingRequests();
6192 if (rc < 0) {
6193 LOGE("notifyErrorForPendingRequests failed");
6194 pthread_mutex_unlock(&mMutex);
6195 return rc;
6196 }
6197
6198 mFlush = false;
6199
6200 // Start the Streams/Channels
6201 if (restartChannels) {
6202 rc = startAllChannels();
6203 if (rc < 0) {
6204 LOGE("startAllChannels failed");
6205 pthread_mutex_unlock(&mMutex);
6206 return rc;
6207 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006208 if (mChannelHandle) {
6209 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006210 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006211 if (rc < 0) {
6212 LOGE("start_channel failed");
6213 pthread_mutex_unlock(&mMutex);
6214 return rc;
6215 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006216 }
6217 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006218 pthread_mutex_unlock(&mMutex);
6219
6220 return 0;
6221}
6222
6223/*===========================================================================
6224 * FUNCTION : flushPerf
6225 *
6226 * DESCRIPTION: This is the performance optimization version of flush that does
6227 * not use stream off, rather flushes the system
6228 *
6229 * PARAMETERS :
6230 *
6231 *
6232 * RETURN : 0 : success
6233 * -EINVAL: input is malformed (device is not valid)
6234 * -ENODEV: if the device has encountered a serious error
6235 *==========================================================================*/
6236int QCamera3HardwareInterface::flushPerf()
6237{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006238 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006239 int32_t rc = 0;
6240 struct timespec timeout;
6241 bool timed_wait = false;
6242
6243 pthread_mutex_lock(&mMutex);
6244 mFlushPerf = true;
6245 mPendingBuffersMap.numPendingBufsAtFlush =
6246 mPendingBuffersMap.get_num_overall_buffers();
6247 LOGD("Calling flush. Wait for %d buffers to return",
6248 mPendingBuffersMap.numPendingBufsAtFlush);
6249
6250 /* send the flush event to the backend */
6251 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6252 if (rc < 0) {
6253 LOGE("Error in flush: IOCTL failure");
6254 mFlushPerf = false;
6255 pthread_mutex_unlock(&mMutex);
6256 return -ENODEV;
6257 }
6258
6259 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6260 LOGD("No pending buffers in HAL, return flush");
6261 mFlushPerf = false;
6262 pthread_mutex_unlock(&mMutex);
6263 return rc;
6264 }
6265
6266 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006267 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006268 if (rc < 0) {
6269 LOGE("Error reading the real time clock, cannot use timed wait");
6270 } else {
6271 timeout.tv_sec += FLUSH_TIMEOUT;
6272 timed_wait = true;
6273 }
6274
6275 //Block on conditional variable
6276 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6277 LOGD("Waiting on mBuffersCond");
6278 if (!timed_wait) {
6279 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6280 if (rc != 0) {
6281 LOGE("pthread_cond_wait failed due to rc = %s",
6282 strerror(rc));
6283 break;
6284 }
6285 } else {
6286 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6287 if (rc != 0) {
6288 LOGE("pthread_cond_timedwait failed due to rc = %s",
6289 strerror(rc));
6290 break;
6291 }
6292 }
6293 }
6294 if (rc != 0) {
6295 mFlushPerf = false;
6296 pthread_mutex_unlock(&mMutex);
6297 return -ENODEV;
6298 }
6299
6300 LOGD("Received buffers, now safe to return them");
6301
6302 //make sure the channels handle flush
6303 //currently only required for the picture channel to release snapshot resources
6304 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6305 it != mStreamInfo.end(); it++) {
6306 QCamera3Channel *channel = (*it)->channel;
6307 if (channel) {
6308 rc = channel->flush();
6309 if (rc) {
6310 LOGE("Flushing the channels failed with error %d", rc);
6311 // even though the channel flush failed we need to continue and
6312 // return the buffers we have to the framework, however the return
6313 // value will be an error
6314 rc = -ENODEV;
6315 }
6316 }
6317 }
6318
6319 /* notify the frameworks and send errored results */
6320 rc = notifyErrorForPendingRequests();
6321 if (rc < 0) {
6322 LOGE("notifyErrorForPendingRequests failed");
6323 pthread_mutex_unlock(&mMutex);
6324 return rc;
6325 }
6326
6327 //unblock process_capture_request
6328 mPendingLiveRequest = 0;
6329 unblockRequestIfNecessary();
6330
6331 mFlushPerf = false;
6332 pthread_mutex_unlock(&mMutex);
6333 LOGD ("Flush Operation complete. rc = %d", rc);
6334 return rc;
6335}
6336
6337/*===========================================================================
6338 * FUNCTION : handleCameraDeviceError
6339 *
6340 * DESCRIPTION: This function performs an internal flush, notifies the
6341 * framework of the error and updates the state variable.
6342 *
6343 * PARAMETERS : None
6344 *
6345 * RETURN : NO_ERROR on Success
6346 * Error code on failure
6347 *==========================================================================*/
6348int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6349{
6350 int32_t rc = NO_ERROR;
6351
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006352 {
6353 Mutex::Autolock lock(mFlushLock);
6354 pthread_mutex_lock(&mMutex);
6355 if (mState != ERROR) {
6356 //if mState != ERROR, nothing to be done
6357 pthread_mutex_unlock(&mMutex);
6358 return NO_ERROR;
6359 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006360 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006361
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006362 rc = flush(false /* restart channels */);
6363 if (NO_ERROR != rc) {
6364 LOGE("internal flush to handle mState = ERROR failed");
6365 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006366
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006367 pthread_mutex_lock(&mMutex);
6368 mState = DEINIT;
6369 pthread_mutex_unlock(&mMutex);
6370 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006371
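 // Notify the framework of the fatal device error so it can close the camera
 // device.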
6372 camera3_notify_msg_t notify_msg;
6373 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6374 notify_msg.type = CAMERA3_MSG_ERROR;
6375 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6376 notify_msg.message.error.error_stream = NULL;
6377 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006378 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006379
6380 return rc;
6381}
6382
6383/*===========================================================================
6384 * FUNCTION : captureResultCb
6385 *
6386 * DESCRIPTION: Callback handler for all capture result
6387 * (streams, as well as metadata)
6388 *
6389 * PARAMETERS :
6390 * @metadata : metadata information
6391 * @buffer : actual gralloc buffer to be returned to frameworks.
6392 * NULL if metadata.
6393 *
6394 * RETURN : NONE
6395 *==========================================================================*/
6396void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6397 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6398{
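 // Route the callback: batched metadata is expanded per frame in
 // handleBatchMetadata(); everything else is handled under mMutex.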
6399 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006400 pthread_mutex_lock(&mMutex);
6401 uint8_t batchSize = mBatchSize;
6402 pthread_mutex_unlock(&mMutex);
6403 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006404 handleBatchMetadata(metadata_buf,
6405 true /* free_and_bufdone_meta_buf */);
6406 } else { /* mBatchSize = 0 */
6407 hdrPlusPerfLock(metadata_buf);
6408 pthread_mutex_lock(&mMutex);
6409 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006410 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006411 true /* last urgent frame of batch metadata */,
6412 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006413 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006414 pthread_mutex_unlock(&mMutex);
6415 }
6416 } else if (isInputBuffer) {
6417 pthread_mutex_lock(&mMutex);
6418 handleInputBufferWithLock(frame_number);
6419 pthread_mutex_unlock(&mMutex);
6420 } else {
6421 pthread_mutex_lock(&mMutex);
6422 handleBufferWithLock(buffer, frame_number);
6423 pthread_mutex_unlock(&mMutex);
6424 }
6425 return;
6426}
6427
6428/*===========================================================================
6429 * FUNCTION : getReprocessibleOutputStreamId
6430 *
6431 * DESCRIPTION: Get source output stream id for the input reprocess stream
6432 * based on size and format, which would be the largest
6433 * output stream if an input stream exists.
6434 *
6435 * PARAMETERS :
6436 * @id : return the stream id if found
6437 *
6438 * RETURN : int32_t type of status
6439 * NO_ERROR -- success
6440 * none-zero failure code
6441 *==========================================================================*/
6442int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6443{
6444 /* check if any output or bidirectional stream with the same size and format
6445 and return that stream */
6446 if ((mInputStreamInfo.dim.width > 0) &&
6447 (mInputStreamInfo.dim.height > 0)) {
6448 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6449 it != mStreamInfo.end(); it++) {
6450
6451 camera3_stream_t *stream = (*it)->stream;
6452 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6453 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6454 (stream->format == mInputStreamInfo.format)) {
6455 // Usage flag for an input stream and the source output stream
6456 // may be different.
6457 LOGD("Found reprocessible output stream! %p", *it);
6458 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6459 stream->usage, mInputStreamInfo.usage);
6460
6461 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6462 if (channel != NULL && channel->mStreams[0]) {
6463 id = channel->mStreams[0]->getMyServerID();
6464 return NO_ERROR;
6465 }
6466 }
6467 }
6468 } else {
6469 LOGD("No input stream, so no reprocessible output stream");
6470 }
6471 return NAME_NOT_FOUND;
6472}
6473
6474/*===========================================================================
6475 * FUNCTION : lookupFwkName
6476 *
6477 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6478 * make sure the parameter is correctly propagated
6479 *
6480 * PARAMETERS :
6481 * @arr : map between the two enums
6482 * @len : len of the map
6483 * @hal_name : name of the hal_parm to map
6484 *
6485 * RETURN : int type of status
6486 * fwk_name -- success
6487 * none-zero failure code
6488 *==========================================================================*/
6489template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6490 size_t len, halType hal_name)
6491{
6492
6493 for (size_t i = 0; i < len; i++) {
6494 if (arr[i].hal_name == hal_name) {
6495 return arr[i].fwk_name;
6496 }
6497 }
6498
6499 /* Not being able to find a matching framework type is not necessarily
6500 * an error case. This happens when mm-camera supports more attributes
6501 * than the framework does */
6502 LOGH("Cannot find matching framework type");
6503 return NAME_NOT_FOUND;
6504}
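// Illustrative use of the lookup helpers above (a sketch only; it assumes a
// QCameraMap-style table such as the effect-mode map defined in this HAL and
// the METADATA_MAP_SIZE() helper macro):
//
//     int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
//             METADATA_MAP_SIZE(EFFECT_MODES_MAP), CAM_EFFECT_MODE_MONO);
//     if (fwkEffect != NAME_NOT_FOUND) {
//         // publish fwkEffect in the result metadata
//     }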
6505
6506/*===========================================================================
6507 * FUNCTION : lookupHalName
6508 *
6509 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6510 * make sure the parameter is correctly propagated
6511 *
6512 * PARAMETERS :
6513 * @arr : map between the two enums
6514 * @len : len of the map
6515 * @fwk_name : name of the hal_parm to map
6516 *
6517 * RETURN : int32_t type of status
6518 * hal_name -- success
6519 * none-zero failure code
6520 *==========================================================================*/
6521template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6522 size_t len, fwkType fwk_name)
6523{
6524 for (size_t i = 0; i < len; i++) {
6525 if (arr[i].fwk_name == fwk_name) {
6526 return arr[i].hal_name;
6527 }
6528 }
6529
6530 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6531 return NAME_NOT_FOUND;
6532}
6533
6534/*===========================================================================
6535 * FUNCTION : lookupProp
6536 *
6537 * DESCRIPTION: lookup a value by its name
6538 *
6539 * PARAMETERS :
6540 * @arr : map between the two enums
6541 * @len : size of the map
6542 * @name : name to be looked up
6543 *
6544 * RETURN : Value if found
6545 * CAM_CDS_MODE_MAX if not found
6546 *==========================================================================*/
6547template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6548 size_t len, const char *name)
6549{
6550 if (name) {
6551 for (size_t i = 0; i < len; i++) {
6552 if (!strcmp(arr[i].desc, name)) {
6553 return arr[i].val;
6554 }
6555 }
6556 }
6557 return CAM_CDS_MODE_MAX;
6558}
6559
6560/*===========================================================================
6561 * FUNCTION   : translateFromHalMetadata
 *
6562 * DESCRIPTION: Translate metadata from the HAL/backend format into the
 *              camera_metadata_t format expected by the framework
6563 *
6564 * PARAMETERS :
6565 * @metadata : metadata information from callback
6566 * @timestamp: metadata buffer timestamp
6567 * @request_id: request id
6568 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006569 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006570 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6571 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006572 * @pprocDone: whether internal offline postprocsesing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006573 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6574 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006575 *
6576 * RETURN : camera_metadata_t*
6577 * metadata in a format specified by fwk
6578 *==========================================================================*/
6579camera_metadata_t*
6580QCamera3HardwareInterface::translateFromHalMetadata(
6581 metadata_buffer_t *metadata,
6582 nsecs_t timestamp,
6583 int32_t request_id,
6584 const CameraMetadata& jpegMetadata,
6585 uint8_t pipeline_depth,
6586 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006587 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006588 /* DevCamDebug metadata translateFromHalMetadata argument */
6589 uint8_t DevCamDebug_meta_enable,
6590 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006591 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006592 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006593 bool lastMetadataInBatch,
6594 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006595{
6596 CameraMetadata camMetadata;
6597 camera_metadata_t *resultMetadata;
6598
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006599 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006600 /* In batch mode, when this is not the last frame in the batch, populate only
6601 * SENSOR_TIMESTAMP. The timestamp is needed for the shutter notify
6602 * calculation. */
6603 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6604 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006605 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006606 }
6607
Thierry Strudel3d639192016-09-09 11:52:26 -07006608 if (jpegMetadata.entryCount())
6609 camMetadata.append(jpegMetadata);
6610
6611 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6612 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6613 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6614 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006615 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006616 if (mBatchSize == 0) {
6617 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6618 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6619 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006620
Samuel Ha68ba5172016-12-15 18:41:12 -08006621 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6622 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6623 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6624 // DevCamDebug metadata translateFromHalMetadata AF
6625 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6626 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6627 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6628 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6629 }
6630 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6631 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6632 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6633 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6634 }
6635 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6636 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6637 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6638 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6639 }
6640 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6641 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6642 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6643 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6644 }
6645 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6646 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6647 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6648 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6649 }
6650 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6651 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6652 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6653 *DevCamDebug_af_monitor_pdaf_target_pos;
6654 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6655 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6658 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6659 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6660 *DevCamDebug_af_monitor_pdaf_confidence;
6661 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6662 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6665 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6666 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6667 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6668 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6669 }
6670 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6671 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6672 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6673 *DevCamDebug_af_monitor_tof_target_pos;
6674 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6675 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6678 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6679 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6680 *DevCamDebug_af_monitor_tof_confidence;
6681 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6682 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6683 }
6684 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6685 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6686 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6687 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6688 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6689 }
6690 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6691 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6692 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6693 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6694 &fwk_DevCamDebug_af_monitor_type_select, 1);
6695 }
6696 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6697 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6698 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6699 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6700 &fwk_DevCamDebug_af_monitor_refocus, 1);
6701 }
6702 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6703 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6704 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6705 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6706 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6707 }
6708 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6709 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6710 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6711 *DevCamDebug_af_search_pdaf_target_pos;
6712 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6713 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6714 }
6715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6716 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6717 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6718 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6719 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6720 }
6721 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6722 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6723 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6724 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6725 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6726 }
6727 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6728 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6729 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6730 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6731 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6734 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6735 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6736 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6737 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6738 }
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6740 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6741 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6742 *DevCamDebug_af_search_tof_target_pos;
6743 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6744 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6745 }
6746 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6747 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6748 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6749 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6750 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6751 }
6752 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6753 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6754 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6755 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6756 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6759 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6760 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6761 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6762 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6765 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6766 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6767 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6768 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6769 }
6770 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6771 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6772 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6773 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6774 &fwk_DevCamDebug_af_search_type_select, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6777 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6778 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6779 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6780 &fwk_DevCamDebug_af_search_next_pos, 1);
6781 }
6782 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6783 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6784 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6785 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6786 &fwk_DevCamDebug_af_search_target_pos, 1);
6787 }
6788 // DevCamDebug metadata translateFromHalMetadata AEC
6789 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6790 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6791 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6792 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6793 }
6794 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6795 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6796 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6797 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6798 }
6799 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6800 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6801 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6802 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6805 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6806 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6807 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6808 }
6809 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6810 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6811 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6812 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6813 }
6814 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6815 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6816 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6817 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6818 }
6819 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6820 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6821 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6822 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6823 }
6824 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6825 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6826 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6827 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6828 }
Samuel Ha34229982017-02-17 13:51:11 -08006829 // DevCamDebug metadata translateFromHalMetadata zzHDR
6830 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6831 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6832 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6833 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6834 }
6835 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6836 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006837 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006838 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6839 }
6840 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6841 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6842 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6843 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6844 }
6845 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6846 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006847 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006848 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6849 }
6850 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6851 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6852 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6853 *DevCamDebug_aec_hdr_sensitivity_ratio;
6854 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6855 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6856 }
6857 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6858 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6859 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6860 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6861 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6862 }
6863 // DevCamDebug metadata translateFromHalMetadata ADRC
6864 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6865 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6866 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6867 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6868 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6869 }
6870 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6871 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6872 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6873 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6874 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6875 }
6876 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6877 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6878 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6879 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6880 }
6881 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6882 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6883 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6884 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6885 }
6886 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6887 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6888 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6889 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6890 }
6891 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6892 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6893 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6894 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6895 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006896 // DevCamDebug metadata translateFromHalMetadata AWB
6897 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6898 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6899 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6900 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6901 }
6902 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6903 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6904 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6905 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6906 }
6907 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6908 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6909 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6910 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6911 }
6912 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6913 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6914 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6915 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6916 }
6917 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6918 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6919 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6920 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6921 }
6922 }
6923 // atrace_end(ATRACE_TAG_ALWAYS);
6924
Thierry Strudel3d639192016-09-09 11:52:26 -07006925 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6926 int64_t fwk_frame_number = *frame_number;
6927 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6928 }
6929
6930 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6931 int32_t fps_range[2];
6932 fps_range[0] = (int32_t)float_range->min_fps;
6933 fps_range[1] = (int32_t)float_range->max_fps;
6934 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6935 fps_range, 2);
6936 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6937 fps_range[0], fps_range[1]);
6938 }
6939
6940 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6941 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6942 }
6943
6944 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6945 int val = lookupFwkName(SCENE_MODES_MAP,
6946 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6947 *sceneMode);
6948 if (NAME_NOT_FOUND != val) {
6949 uint8_t fwkSceneMode = (uint8_t)val;
6950 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6951 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6952 fwkSceneMode);
6953 }
6954 }
6955
6956 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6957 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6958 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6959 }
6960
6961 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6962 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6963 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6964 }
6965
6966 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6967 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6968 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6969 }
6970
6971 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6972 CAM_INTF_META_EDGE_MODE, metadata) {
6973 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6974 }
6975
6976 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6977 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6978 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6979 }
6980
6981 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6982 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6983 }
6984
6985 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6986 if (0 <= *flashState) {
6987 uint8_t fwk_flashState = (uint8_t) *flashState;
6988 if (!gCamCapability[mCameraId]->flash_available) {
6989 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6990 }
6991 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6992 }
6993 }
6994
6995 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6996 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6997 if (NAME_NOT_FOUND != val) {
6998 uint8_t fwk_flashMode = (uint8_t)val;
6999 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7000 }
7001 }
7002
7003 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7004 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7005 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7006 }
7007
7008 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7009 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7010 }
7011
7012 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7013 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7014 }
7015
7016 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7017 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7018 }
7019
7020 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7021 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7022 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7023 }
7024
7025 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7026 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7027 LOGD("fwk_videoStab = %d", fwk_videoStab);
7028 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7029 } else {
7030 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be non-NULL,
7031 // so hardcode the video stabilization result to OFF mode.
7032 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7033 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007034 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007035 }
7036
7037 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7038 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7039 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7040 }
7041
7042 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7043 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7044 }
7045
Thierry Strudel3d639192016-09-09 11:52:26 -07007046 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7047 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007048 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007049
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007050 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7051 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007052
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007053 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007054 blackLevelAppliedPattern->cam_black_level[0],
7055 blackLevelAppliedPattern->cam_black_level[1],
7056 blackLevelAppliedPattern->cam_black_level[2],
7057 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007058 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7059 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007060
7061#ifndef USE_HAL_3_3
7062 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307063 // Need to convert the internal 14-bit black level to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007064 // depth space, i.e. drop 4 bits by dividing by 2^4 = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307065 fwk_blackLevelInd[0] /= 16.0;
7066 fwk_blackLevelInd[1] /= 16.0;
7067 fwk_blackLevelInd[2] /= 16.0;
7068 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007069 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7070 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007071#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007072 }
7073
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007074#ifndef USE_HAL_3_3
7075 // Fixed whitelevel is used by ISP/Sensor
7076 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7077 &gCamCapability[mCameraId]->white_level, 1);
7078#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007079
7080 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7081 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7082 int32_t scalerCropRegion[4];
7083 scalerCropRegion[0] = hScalerCropRegion->left;
7084 scalerCropRegion[1] = hScalerCropRegion->top;
7085 scalerCropRegion[2] = hScalerCropRegion->width;
7086 scalerCropRegion[3] = hScalerCropRegion->height;
7087
7088 // Adjust crop region from sensor output coordinate system to active
7089 // array coordinate system.
7090 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7091 scalerCropRegion[2], scalerCropRegion[3]);
7092
7093 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7094 }
7095
7096 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7097 LOGD("sensorExpTime = %lld", *sensorExpTime);
7098 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7099 }
7100
7101 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7102 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7103 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7104 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7105 }
7106
7107 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7108 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7109 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7110 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7111 sensorRollingShutterSkew, 1);
7112 }
7113
7114 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7115 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7116 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7117
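        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) coefficient pair per color channel
        // for the linear noise model variance = S * signal + O (per the Android metadata
        // documentation), so the single (S, O) pair computed below is replicated for every channel.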
7118 //calculate the noise profile based on sensitivity
7119 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7120 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7121 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7122 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7123 noise_profile[i] = noise_profile_S;
7124 noise_profile[i+1] = noise_profile_O;
7125 }
7126 LOGD("noise model entry (S, O) is (%f, %f)",
7127 noise_profile_S, noise_profile_O);
7128 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7129 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7130 }
7131
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007133 int32_t fwk_ispSensitivity = 100;
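    // Per the framework definition, ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST is expressed in
    // units where 100 means no additional digital gain; 100 is also used as the fallback when
    // the ISP does not report a sensitivity value.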
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007135 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007136 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007137 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7138 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7139 }
7140 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007141#endif
7142
Thierry Strudel3d639192016-09-09 11:52:26 -07007143 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7144 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7145 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7146 }
7147
7148 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7149 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7150 *faceDetectMode);
7151 if (NAME_NOT_FOUND != val) {
7152 uint8_t fwk_faceDetectMode = (uint8_t)val;
7153 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7154
7155 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7156 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7157 CAM_INTF_META_FACE_DETECTION, metadata) {
7158 uint8_t numFaces = MIN(
7159 faceDetectionInfo->num_faces_detected, MAX_ROI);
7160 int32_t faceIds[MAX_ROI];
7161 uint8_t faceScores[MAX_ROI];
7162 int32_t faceRectangles[MAX_ROI * 4];
7163 int32_t faceLandmarks[MAX_ROI * 6];
7164 size_t j = 0, k = 0;
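                    // j indexes the face rectangle array (4 values per face); k indexes the
                    // landmark array (6 values per face: left eye x/y, right eye x/y, mouth x/y).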
7165
7166 for (size_t i = 0; i < numFaces; i++) {
7167 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7168 // Adjust crop region from sensor output coordinate system to active
7169 // array coordinate system.
7170 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7171 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7172 rect.width, rect.height);
7173
7174 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7175 faceRectangles+j, -1);
7176
Jason Lee8ce36fa2017-04-19 19:40:37 -07007177 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7178 "bottom-right (%d, %d)",
7179 faceDetectionInfo->frame_id, i,
7180 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7181 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7182
Thierry Strudel3d639192016-09-09 11:52:26 -07007183 j+= 4;
7184 }
7185 if (numFaces <= 0) {
7186 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7187 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7188 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7189 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7190 }
7191
7192 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7193 numFaces);
7194 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7195 faceRectangles, numFaces * 4U);
7196 if (fwk_faceDetectMode ==
7197 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7198 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7199 CAM_INTF_META_FACE_LANDMARK, metadata) {
7200
7201 for (size_t i = 0; i < numFaces; i++) {
7202 // Map the co-ordinate sensor output coordinate system to active
7203 // array coordinate system.
7204 mCropRegionMapper.toActiveArray(
7205 landmarks->face_landmarks[i].left_eye_center.x,
7206 landmarks->face_landmarks[i].left_eye_center.y);
7207 mCropRegionMapper.toActiveArray(
7208 landmarks->face_landmarks[i].right_eye_center.x,
7209 landmarks->face_landmarks[i].right_eye_center.y);
7210 mCropRegionMapper.toActiveArray(
7211 landmarks->face_landmarks[i].mouth_center.x,
7212 landmarks->face_landmarks[i].mouth_center.y);
7213
7214 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007215
7216 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7217 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7218 faceDetectionInfo->frame_id, i,
7219 faceLandmarks[k + LEFT_EYE_X],
7220 faceLandmarks[k + LEFT_EYE_Y],
7221 faceLandmarks[k + RIGHT_EYE_X],
7222 faceLandmarks[k + RIGHT_EYE_Y],
7223 faceLandmarks[k + MOUTH_X],
7224 faceLandmarks[k + MOUTH_Y]);
7225
Thierry Strudel04e026f2016-10-10 11:27:36 -07007226 k+= TOTAL_LANDMARK_INDICES;
7227 }
7228 } else {
7229 for (size_t i = 0; i < numFaces; i++) {
7230 setInvalidLandmarks(faceLandmarks+k);
7231 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007232 }
7233 }
7234
Jason Lee49619db2017-04-13 12:07:22 -07007235 for (size_t i = 0; i < numFaces; i++) {
7236 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7237
7238 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7239 faceDetectionInfo->frame_id, i, faceIds[i]);
7240 }
7241
Thierry Strudel3d639192016-09-09 11:52:26 -07007242 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7243 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7244 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007245 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007246 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7247 CAM_INTF_META_FACE_BLINK, metadata) {
7248 uint8_t detected[MAX_ROI];
7249 uint8_t degree[MAX_ROI * 2];
7250 for (size_t i = 0; i < numFaces; i++) {
7251 detected[i] = blinks->blink[i].blink_detected;
7252 degree[2 * i] = blinks->blink[i].left_blink;
7253 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007254
Jason Lee49619db2017-04-13 12:07:22 -07007255 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7256 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7257 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7258 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007259 }
7260 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7261 detected, numFaces);
7262 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7263 degree, numFaces * 2);
7264 }
7265 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7266 CAM_INTF_META_FACE_SMILE, metadata) {
7267 uint8_t degree[MAX_ROI];
7268 uint8_t confidence[MAX_ROI];
7269 for (size_t i = 0; i < numFaces; i++) {
7270 degree[i] = smiles->smile[i].smile_degree;
7271 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007272
Jason Lee49619db2017-04-13 12:07:22 -07007273 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7274 "smile_degree=%d, smile_score=%d",
7275 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007276 }
7277 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7278 degree, numFaces);
7279 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7280 confidence, numFaces);
7281 }
7282 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7283 CAM_INTF_META_FACE_GAZE, metadata) {
7284 int8_t angle[MAX_ROI];
7285 int32_t direction[MAX_ROI * 3];
7286 int8_t degree[MAX_ROI * 2];
7287 for (size_t i = 0; i < numFaces; i++) {
7288 angle[i] = gazes->gaze[i].gaze_angle;
7289 direction[3 * i] = gazes->gaze[i].updown_dir;
7290 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7291 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7292 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7293 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007294
7295 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7296 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7297 "left_right_gaze=%d, top_bottom_gaze=%d",
7298 faceDetectionInfo->frame_id, i, angle[i],
7299 direction[3 * i], direction[3 * i + 1],
7300 direction[3 * i + 2],
7301 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007302 }
7303 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7304 (uint8_t *)angle, numFaces);
7305 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7306 direction, numFaces * 3);
7307 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7308 (uint8_t *)degree, numFaces * 2);
7309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007310 }
7311 }
7312 }
7313 }
7314
7315 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7316 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007317 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007318 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007319 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007320
Shuzhen Wang14415f52016-11-16 18:26:18 -08007321 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7322 histogramBins = *histBins;
7323 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7324 }
7325
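        // Only one channel's histogram buffer is exported below; for Bayer statistics the buffer
        // matching the reported data type is selected, falling back to the R channel stats.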
7326 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007327 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7328 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007329 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007330
7331 switch (stats_data->type) {
7332 case CAM_HISTOGRAM_TYPE_BAYER:
7333 switch (stats_data->bayer_stats.data_type) {
7334 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007335 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7336 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007337 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007338 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7339 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007340 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007341 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7342 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007343 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007344 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007345 case CAM_STATS_CHANNEL_R:
7346 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7348 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007349 }
7350 break;
7351 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007352 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007353 break;
7354 }
7355
Shuzhen Wang14415f52016-11-16 18:26:18 -08007356 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007357 }
7358 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007359 }
7360
7361 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7362 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7363 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7364 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7365 }
7366
7367 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7368 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7369 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7370 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7371 }
7372
7373 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7374 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7375 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7376 CAM_MAX_SHADING_MAP_HEIGHT);
7377 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7378 CAM_MAX_SHADING_MAP_WIDTH);
7379 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7380 lensShadingMap->lens_shading, 4U * map_width * map_height);
7381 }
7382
7383 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7384 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7385 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7386 }
7387
7388 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7389 //Populate CAM_INTF_META_TONEMAP_CURVES
7390 /* ch0 = G, ch 1 = B, ch 2 = R*/
7391 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7392 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7393 tonemap->tonemap_points_cnt,
7394 CAM_MAX_TONEMAP_CURVE_SIZE);
7395 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7396 }
7397
7398 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7399 &tonemap->curves[0].tonemap_points[0][0],
7400 tonemap->tonemap_points_cnt * 2);
7401
7402 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7403 &tonemap->curves[1].tonemap_points[0][0],
7404 tonemap->tonemap_points_cnt * 2);
7405
7406 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7407 &tonemap->curves[2].tonemap_points[0][0],
7408 tonemap->tonemap_points_cnt * 2);
7409 }
7410
7411 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7412 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7413 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7414 CC_GAIN_MAX);
7415 }
7416
7417 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7418 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7419 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7420 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7421 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7422 }
7423
7424 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7425 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7426 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7427 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7428 toneCurve->tonemap_points_cnt,
7429 CAM_MAX_TONEMAP_CURVE_SIZE);
7430 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7431 }
7432 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7433 (float*)toneCurve->curve.tonemap_points,
7434 toneCurve->tonemap_points_cnt * 2);
7435 }
7436
7437 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7438 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7439 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7440 predColorCorrectionGains->gains, 4);
7441 }
7442
7443 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7444 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7445 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7446 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7447 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7448 }
7449
7450 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7451 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7452 }
7453
7454 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7455 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7456 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7457 }
7458
7459 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7460 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7461 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7462 }
7463
7464 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7465 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7466 *effectMode);
7467 if (NAME_NOT_FOUND != val) {
7468 uint8_t fwk_effectMode = (uint8_t)val;
7469 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7470 }
7471 }
7472
7473 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7474 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7475 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7476 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7477 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7478 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7479 }
7480 int32_t fwk_testPatternData[4];
7481 fwk_testPatternData[0] = testPatternData->r;
7482 fwk_testPatternData[3] = testPatternData->b;
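        // ANDROID_SENSOR_TEST_PATTERN_DATA is presumably ordered [R, G_even, G_odd, B]; which of
        // Gr/Gb lands in the even-row green slot depends on the CFA arrangement, hence the swap below.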
7483 switch (gCamCapability[mCameraId]->color_arrangement) {
7484 case CAM_FILTER_ARRANGEMENT_RGGB:
7485 case CAM_FILTER_ARRANGEMENT_GRBG:
7486 fwk_testPatternData[1] = testPatternData->gr;
7487 fwk_testPatternData[2] = testPatternData->gb;
7488 break;
7489 case CAM_FILTER_ARRANGEMENT_GBRG:
7490 case CAM_FILTER_ARRANGEMENT_BGGR:
7491 fwk_testPatternData[2] = testPatternData->gr;
7492 fwk_testPatternData[1] = testPatternData->gb;
7493 break;
7494 default:
7495 LOGE("color arrangement %d is not supported",
7496 gCamCapability[mCameraId]->color_arrangement);
7497 break;
7498 }
7499 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7500 }
7501
7502 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7503 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7504 }
7505
7506 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7507 String8 str((const char *)gps_methods);
7508 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7509 }
7510
7511 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7512 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7513 }
7514
7515 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7516 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7517 }
7518
7519 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7520 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7521 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7522 }
7523
7524 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7525 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7526 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7527 }
7528
7529 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7530 int32_t fwk_thumb_size[2];
7531 fwk_thumb_size[0] = thumb_size->width;
7532 fwk_thumb_size[1] = thumb_size->height;
7533 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7534 }
7535
7536 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7537 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7538 privateData,
7539 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7540 }
7541
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007542 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007543 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007544 meteringMode, 1);
7545 }
7546
Thierry Strudel54dc9782017-02-15 12:12:10 -08007547 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7548 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7549 LOGD("hdr_scene_data: %d %f\n",
7550 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7551 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7552 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7553 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7554 &isHdr, 1);
7555 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7556 &isHdrConfidence, 1);
7557 }
7558
7559
7560
Thierry Strudel3d639192016-09-09 11:52:26 -07007561 if (metadata->is_tuning_params_valid) {
7562 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7563 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7564 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7565
7566
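        // The blob packed below is laid out as six uint32_t header fields (data version and the
        // sensor/VFE/CPP/CAC/mod3 section sizes) followed by the variable-length sensor, VFE,
        // CPP and CAC tuning payloads, in that order.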
7567 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7568 sizeof(uint32_t));
7569 data += sizeof(uint32_t);
7570
7571 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7572 sizeof(uint32_t));
7573 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7574 data += sizeof(uint32_t);
7575
7576 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7577 sizeof(uint32_t));
7578 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7579 data += sizeof(uint32_t);
7580
7581 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7582 sizeof(uint32_t));
7583 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7584 data += sizeof(uint32_t);
7585
7586 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7587 sizeof(uint32_t));
7588 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7589 data += sizeof(uint32_t);
7590
7591 metadata->tuning_params.tuning_mod3_data_size = 0;
7592 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7593 sizeof(uint32_t));
7594 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7595 data += sizeof(uint32_t);
7596
7597 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7598 TUNING_SENSOR_DATA_MAX);
7599 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7600 count);
7601 data += count;
7602
7603 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7604 TUNING_VFE_DATA_MAX);
7605 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7606 count);
7607 data += count;
7608
7609 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7610 TUNING_CPP_DATA_MAX);
7611 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7612 count);
7613 data += count;
7614
7615 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7616 TUNING_CAC_DATA_MAX);
7617 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7618 count);
7619 data += count;
7620
7621 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7622 (int32_t *)(void *)tuning_meta_data_blob,
7623 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7624 }
7625
7626 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7627 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7628 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7629 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7630 NEUTRAL_COL_POINTS);
7631 }
7632
7633 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7634 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7635 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7636 }
7637
7638 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7639 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7640 // Adjust crop region from sensor output coordinate system to active
7641 // array coordinate system.
7642 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7643 hAeRegions->rect.width, hAeRegions->rect.height);
7644
7645 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7646 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7647 REGIONS_TUPLE_COUNT);
7648 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7649 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7650 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7651 hAeRegions->rect.height);
7652 }
7653
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007654 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7655 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7656 if (NAME_NOT_FOUND != val) {
7657 uint8_t fwkAfMode = (uint8_t)val;
7658 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7659 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7660 } else {
7661 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7662 val);
7663 }
7664 }
7665
Thierry Strudel3d639192016-09-09 11:52:26 -07007666 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7667 uint8_t fwk_afState = (uint8_t) *afState;
7668 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007669 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007670 }
7671
7672 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7673 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7674 }
7675
7676 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7677 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7678 }
7679
7680 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7681 uint8_t fwk_lensState = *lensState;
7682 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7683 }
7684
Thierry Strudel3d639192016-09-09 11:52:26 -07007685
7686 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007687 uint32_t ab_mode = *hal_ab_mode;
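        // The framework's ANDROID_CONTROL_AE_ANTIBANDING_MODE enum only has OFF/50HZ/60HZ/AUTO,
        // so the HAL's refined AUTO_50HZ/AUTO_60HZ states are reported as plain AUTO.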
7688 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7689 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7690 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7691 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007692 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007693 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007694 if (NAME_NOT_FOUND != val) {
7695 uint8_t fwk_ab_mode = (uint8_t)val;
7696 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7697 }
7698 }
7699
7700 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7701 int val = lookupFwkName(SCENE_MODES_MAP,
7702 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7703 if (NAME_NOT_FOUND != val) {
7704 uint8_t fwkBestshotMode = (uint8_t)val;
7705 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7706 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7707 } else {
7708 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7709 }
7710 }
7711
7712 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7713 uint8_t fwk_mode = (uint8_t) *mode;
7714 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7715 }
7716
7717 /* Constant metadata values to be updated */
7718 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7719 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7720
7721 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7722 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7723
7724 int32_t hotPixelMap[2];
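    // The hot pixel map is published with zero entries; individual defective pixel coordinates
    // are not reported here.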
7725 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7726
7727 // CDS
7728 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7729 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7730 }
7731
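    // Sensor (staggered) video HDR: mCurrFeatureState mirrors the current on/off state so that
    // transitions can be logged (PROFILE_META_HDR_TOGGLED) before the vendor tag is updated.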
Thierry Strudel04e026f2016-10-10 11:27:36 -07007732 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7733 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007734 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007735 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7736 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7737 } else {
7738 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7739 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007740
7741 if(fwk_hdr != curr_hdr_state) {
7742 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7743 if(fwk_hdr)
7744 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7745 else
7746 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7747 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007748 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7749 }
7750
Thierry Strudel54dc9782017-02-15 12:12:10 -08007751 // Binning correction
7752 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7753 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7754 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7755 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7756 }
7757
Thierry Strudel04e026f2016-10-10 11:27:36 -07007758 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007759 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007760 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7761 int8_t is_ir_on = 0;
7762
7763 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7764 if(is_ir_on != curr_ir_state) {
7765 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7766 if(is_ir_on)
7767 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7768 else
7769 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7770 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007771 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007772 }
7773
Thierry Strudel269c81a2016-10-12 12:13:59 -07007774 // AEC SPEED
7775 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7776 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7777 }
7778
7779 // AWB SPEED
7780 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7781 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7782 }
7783
Thierry Strudel3d639192016-09-09 11:52:26 -07007784 // TNR
7785 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7786 uint8_t tnr_enable = tnr->denoise_enable;
7787 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007788 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7789 int8_t is_tnr_on = 0;
7790
7791 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7792 if(is_tnr_on != curr_tnr_state) {
7793 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7794 if(is_tnr_on)
7795 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7796 else
7797 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7798 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007799
7800 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7801 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7802 }
7803
7804 // Reprocess crop data
7805 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7806 uint8_t cnt = crop_data->num_of_streams;
7807 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7808 // mm-qcamera-daemon only posts crop_data for streams
7809 // not linked to pproc, so the absence of valid crop metadata is not
7810 // necessarily an error case.
7811 LOGD("No valid crop metadata entries");
7812 } else {
7813 uint32_t reproc_stream_id;
7814 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7815 LOGD("No reprocessible stream found, ignore crop data");
7816 } else {
7817 int rc = NO_ERROR;
7818 Vector<int32_t> roi_map;
7819 int32_t *crop = new int32_t[cnt*4];
7820 if (NULL == crop) {
7821 rc = NO_MEMORY;
7822 }
7823 if (NO_ERROR == rc) {
7824 int32_t streams_found = 0;
7825 for (size_t i = 0; i < cnt; i++) {
7826 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7827 if (pprocDone) {
7828 // HAL already does internal reprocessing,
7829 // either via reprocessing before JPEG encoding,
7830 // or offline postprocessing for pproc bypass case.
7831 crop[0] = 0;
7832 crop[1] = 0;
7833 crop[2] = mInputStreamInfo.dim.width;
7834 crop[3] = mInputStreamInfo.dim.height;
7835 } else {
7836 crop[0] = crop_data->crop_info[i].crop.left;
7837 crop[1] = crop_data->crop_info[i].crop.top;
7838 crop[2] = crop_data->crop_info[i].crop.width;
7839 crop[3] = crop_data->crop_info[i].crop.height;
7840 }
7841 roi_map.add(crop_data->crop_info[i].roi_map.left);
7842 roi_map.add(crop_data->crop_info[i].roi_map.top);
7843 roi_map.add(crop_data->crop_info[i].roi_map.width);
7844 roi_map.add(crop_data->crop_info[i].roi_map.height);
7845 streams_found++;
7846 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7847 crop[0], crop[1], crop[2], crop[3]);
7848 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7849 crop_data->crop_info[i].roi_map.left,
7850 crop_data->crop_info[i].roi_map.top,
7851 crop_data->crop_info[i].roi_map.width,
7852 crop_data->crop_info[i].roi_map.height);
7853 break;
7854
7855 }
7856 }
7857 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7858 &streams_found, 1);
7859 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7860 crop, (size_t)(streams_found * 4));
7861 if (roi_map.array()) {
7862 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7863 roi_map.array(), roi_map.size());
7864 }
7865 }
7866 if (crop) {
7867 delete [] crop;
7868 }
7869 }
7870 }
7871 }
7872
7873 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7874 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7875 // so hardcode the CAC result to OFF mode.
7876 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7877 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7878 } else {
7879 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7880 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7881 *cacMode);
7882 if (NAME_NOT_FOUND != val) {
7883 uint8_t resultCacMode = (uint8_t)val;
7884 // check whether CAC result from CB is equal to Framework set CAC mode
7885 // If not equal then set the CAC mode came in corresponding request
7886 if (fwk_cacMode != resultCacMode) {
7887 resultCacMode = fwk_cacMode;
7888 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007889 //Check if CAC is disabled by property
7890 if (m_cacModeDisabled) {
7891 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7892 }
7893
Thierry Strudel3d639192016-09-09 11:52:26 -07007894 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7895 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7896 } else {
7897 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7898 }
7899 }
7900 }
7901
7902 // Post blob of cam_cds_data through vendor tag.
7903 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7904 uint8_t cnt = cdsInfo->num_of_streams;
7905 cam_cds_data_t cdsDataOverride;
7906 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7907 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7908 cdsDataOverride.num_of_streams = 1;
7909 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7910 uint32_t reproc_stream_id;
7911 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7912 LOGD("No reprocessible stream found, ignore cds data");
7913 } else {
7914 for (size_t i = 0; i < cnt; i++) {
7915 if (cdsInfo->cds_info[i].stream_id ==
7916 reproc_stream_id) {
7917 cdsDataOverride.cds_info[0].cds_enable =
7918 cdsInfo->cds_info[i].cds_enable;
7919 break;
7920 }
7921 }
7922 }
7923 } else {
7924 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7925 }
7926 camMetadata.update(QCAMERA3_CDS_INFO,
7927 (uint8_t *)&cdsDataOverride,
7928 sizeof(cam_cds_data_t));
7929 }
7930
7931 // Ldaf calibration data
7932 if (!mLdafCalibExist) {
7933 IF_META_AVAILABLE(uint32_t, ldafCalib,
7934 CAM_INTF_META_LDAF_EXIF, metadata) {
7935 mLdafCalibExist = true;
7936 mLdafCalib[0] = ldafCalib[0];
7937 mLdafCalib[1] = ldafCalib[1];
7938 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7939 ldafCalib[0], ldafCalib[1]);
7940 }
7941 }
7942
Thierry Strudel54dc9782017-02-15 12:12:10 -08007943 // EXIF debug data through vendor tag
7944 /*
7945 * Mobicat Mask can assume 3 values:
7946 * 1 refers to Mobicat data,
7947 * 2 refers to Stats Debug and Exif Debug Data
7948 * 3 refers to Mobicat and Stats Debug Data
7949 * We want to make sure that we are sending Exif debug data
7950 * only when Mobicat Mask is 2.
7951 */
7952 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7953 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7954 (uint8_t *)(void *)mExifParams.debug_params,
7955 sizeof(mm_jpeg_debug_exif_params_t));
7956 }
7957
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007958 // Reprocess and DDM debug data through vendor tag
7959 cam_reprocess_info_t repro_info;
7960 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007961 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7962 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007963 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007964 }
7965 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7966 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007967 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007968 }
7969 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7970 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007971 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007972 }
7973 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7974 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007975 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007976 }
7977 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7978 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007979 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007980 }
7981 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007983 }
7984 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7985 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007986 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007987 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007988 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7989 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7990 }
7991 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7992 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7993 }
7994 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7995 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007996
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007997 // INSTANT AEC MODE
7998 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7999 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8000 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8001 }
8002
Shuzhen Wange763e802016-03-31 10:24:29 -07008003 // AF scene change
8004 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8005 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8006 }
8007
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008008 // Enable ZSL
8009 if (enableZsl != nullptr) {
8010 uint8_t value = *enableZsl ?
8011 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8012 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8013 }
8014
Thierry Strudel3d639192016-09-09 11:52:26 -07008015 resultMetadata = camMetadata.release();
8016 return resultMetadata;
8017}
8018
8019/*===========================================================================
8020 * FUNCTION : saveExifParams
8021 *
8022 * DESCRIPTION:
8023 *
8024 * PARAMETERS :
8025 * @metadata : metadata information from callback
8026 *
8027 * RETURN : none
8028 *
8029 *==========================================================================*/
8030void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8031{
8032 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8033 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8034 if (mExifParams.debug_params) {
8035 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8036 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8037 }
8038 }
8039 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8040 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8041 if (mExifParams.debug_params) {
8042 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8043 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8044 }
8045 }
8046 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8047 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8048 if (mExifParams.debug_params) {
8049 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8050 mExifParams.debug_params->af_debug_params_valid = TRUE;
8051 }
8052 }
8053 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8054 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8055 if (mExifParams.debug_params) {
8056 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8057 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8058 }
8059 }
8060 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8061 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8062 if (mExifParams.debug_params) {
8063 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8064 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8065 }
8066 }
8067 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8068 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8069 if (mExifParams.debug_params) {
8070 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8071 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8072 }
8073 }
8074 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8075 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8076 if (mExifParams.debug_params) {
8077 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8078 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8079 }
8080 }
8081 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8082 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8083 if (mExifParams.debug_params) {
8084 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8085 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8086 }
8087 }
8088}
8089
8090/*===========================================================================
8091 * FUNCTION : get3AExifParams
8092 *
8093 * DESCRIPTION:
8094 *
8095 * PARAMETERS : none
8096 *
8097 *
8098 * RETURN : mm_jpeg_exif_params_t
8099 *
8100 *==========================================================================*/
8101mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8102{
8103 return mExifParams;
8104}
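
/* Illustrative usage sketch (not part of the HAL flow): how a JPEG encoding
 * path might consume the cached 3A EXIF debug data. The helper name
 * exampleHas3ADebugData and the 'hal' pointer are hypothetical; only
 * get3AExifParams() and the debug_params fields handled above are taken from
 * this file.
 */
static bool exampleHas3ADebugData(QCamera3HardwareInterface *hal)
{
    mm_jpeg_exif_params_t exifParams = hal->get3AExifParams();
    // debug_params stays NULL unless the debug heap was allocated at init time.
    if ((exifParams.debug_params != NULL) &&
            exifParams.debug_params->ae_debug_params_valid) {
        // The AE debug blob for this frame is valid and safe to embed in EXIF.
        return true;
    }
    return false;
}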
8105
8106/*===========================================================================
8107 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8108 *
8109 * DESCRIPTION:
8110 *
8111 * PARAMETERS :
8112 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008113 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8114 * urgent metadata in a batch. Always true for
8115 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008116 *
8117 * RETURN : camera_metadata_t*
8118 * metadata in a format specified by fwk
8119 *==========================================================================*/
8120camera_metadata_t*
8121QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008122 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008123{
8124 CameraMetadata camMetadata;
8125 camera_metadata_t *resultMetadata;
8126
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008127 if (!lastUrgentMetadataInBatch) {
8128 /* In batch mode, use empty metadata if this is not the last in batch
8129 */
8130 resultMetadata = allocate_camera_metadata(0, 0);
8131 return resultMetadata;
8132 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008133
8134 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8135 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8136 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8137 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8138 }
8139
8140 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8141 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8142 &aecTrigger->trigger, 1);
8143 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8144 &aecTrigger->trigger_id, 1);
8145 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8146 aecTrigger->trigger);
8147 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8148 aecTrigger->trigger_id);
8149 }
8150
8151 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8152 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8153 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8154 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8155 }
8156
Thierry Strudel3d639192016-09-09 11:52:26 -07008157 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8158 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8159 &af_trigger->trigger, 1);
8160 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8161 af_trigger->trigger);
8162 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8163 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8164 af_trigger->trigger_id);
8165 }
8166
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008167 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8168 /*af regions*/
8169 int32_t afRegions[REGIONS_TUPLE_COUNT];
8170 // Adjust the AF region from the sensor output coordinate system to the
8171 // active array coordinate system.
8172 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8173 hAfRegions->rect.width, hAfRegions->rect.height);
8174
8175 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8176 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8177 REGIONS_TUPLE_COUNT);
8178 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8179 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8180 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8181 hAfRegions->rect.height);
8182 }
8183
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008184 // AF region confidence
8185 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8186 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8187 }
8188
Thierry Strudel3d639192016-09-09 11:52:26 -07008189 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8190 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8191 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8192 if (NAME_NOT_FOUND != val) {
8193 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8194 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8195 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8196 } else {
8197 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8198 }
8199 }
8200
8201 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8202 uint32_t aeMode = CAM_AE_MODE_MAX;
8203 int32_t flashMode = CAM_FLASH_MODE_MAX;
8204 int32_t redeye = -1;
8205 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8206 aeMode = *pAeMode;
8207 }
8208 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8209 flashMode = *pFlashMode;
8210 }
8211 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8212 redeye = *pRedeye;
8213 }
8214
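    // Deduce ANDROID_CONTROL_AE_MODE from the backend values gathered above.
    // The precedence mirrors the chain below: red-eye reduction first, then an
    // auto/on flash mode, then plain AE on/off, and finally the external flash
    // mode; anything else is logged as an error.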
8215 if (1 == redeye) {
8216 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8217 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8218 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8219 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8220 flashMode);
8221 if (NAME_NOT_FOUND != val) {
8222 fwk_aeMode = (uint8_t)val;
8223 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8224 } else {
8225 LOGE("Unsupported flash mode %d", flashMode);
8226 }
8227 } else if (aeMode == CAM_AE_MODE_ON) {
8228 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8229 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8230 } else if (aeMode == CAM_AE_MODE_OFF) {
8231 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8232 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008233 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8234 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8235 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008236 } else {
8237 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8238 "flashMode:%d, aeMode:%u!!!",
8239 redeye, flashMode, aeMode);
8240 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008241 if (mInstantAEC) {
8242 // Increment frame index count until a bound is reached for instant AEC.
8243 mInstantAecFrameIdxCount++;
8244 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8245 CAM_INTF_META_AEC_INFO, metadata) {
8246 LOGH("ae_params->settled = %d",ae_params->settled);
8247 // If AEC settled, or if number of frames reached bound value,
8248 // should reset instant AEC.
8249 if (ae_params->settled ||
8250 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8251 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8252 mInstantAEC = false;
8253 mResetInstantAEC = true;
8254 mInstantAecFrameIdxCount = 0;
8255 }
8256 }
8257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008258 resultMetadata = camMetadata.release();
8259 return resultMetadata;
8260}
8261
8262/*===========================================================================
8263 * FUNCTION : dumpMetadataToFile
8264 *
8265 * DESCRIPTION: Dumps tuning metadata to file system
8266 *
8267 * PARAMETERS :
8268 * @meta : tuning metadata
8269 * @dumpFrameCount : current dump frame count
8270 * @enabled : Enable mask
8271 *
8272 *==========================================================================*/
8273void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8274 uint32_t &dumpFrameCount,
8275 bool enabled,
8276 const char *type,
8277 uint32_t frameNumber)
8278{
8279 //Some sanity checks
8280 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8281 LOGE("Tuning sensor data size bigger than expected %d: %d",
8282 meta.tuning_sensor_data_size,
8283 TUNING_SENSOR_DATA_MAX);
8284 return;
8285 }
8286
8287 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8288 LOGE("Tuning VFE data size bigger than expected %d: %d",
8289 meta.tuning_vfe_data_size,
8290 TUNING_VFE_DATA_MAX);
8291 return;
8292 }
8293
8294 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8295 LOGE("Tuning CPP data size bigger than expected %d: %d",
8296 meta.tuning_cpp_data_size,
8297 TUNING_CPP_DATA_MAX);
8298 return;
8299 }
8300
8301 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8302 LOGE("Tuning CAC data size bigger than expected %d: %d",
8303 meta.tuning_cac_data_size,
8304 TUNING_CAC_DATA_MAX);
8305 return;
8306 }
8307 //
8308
8309 if(enabled){
8310 char timeBuf[FILENAME_MAX];
8311 char buf[FILENAME_MAX];
8312 memset(buf, 0, sizeof(buf));
8313 memset(timeBuf, 0, sizeof(timeBuf));
8314 time_t current_time;
8315 struct tm * timeinfo;
8316 time (&current_time);
8317 timeinfo = localtime (&current_time);
8318 if (timeinfo != NULL) {
8319 strftime (timeBuf, sizeof(timeBuf),
8320 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8321 }
8322 String8 filePath(timeBuf);
8323 snprintf(buf,
8324 sizeof(buf),
8325 "%dm_%s_%d.bin",
8326 dumpFrameCount,
8327 type,
8328 frameNumber);
8329 filePath.append(buf);
8330 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8331 if (file_fd >= 0) {
8332 ssize_t written_len = 0;
8333 meta.tuning_data_version = TUNING_DATA_VERSION;
8334 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8335 written_len += write(file_fd, data, sizeof(uint32_t));
8336 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8337 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8338 written_len += write(file_fd, data, sizeof(uint32_t));
8339 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8340 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8341 written_len += write(file_fd, data, sizeof(uint32_t));
8342 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8343 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8344 written_len += write(file_fd, data, sizeof(uint32_t));
8345 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8346 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8347 written_len += write(file_fd, data, sizeof(uint32_t));
8348 meta.tuning_mod3_data_size = 0;
8349 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8350 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8351 written_len += write(file_fd, data, sizeof(uint32_t));
8352 size_t total_size = meta.tuning_sensor_data_size;
8353 data = (void *)((uint8_t *)&meta.data);
8354 written_len += write(file_fd, data, total_size);
8355 total_size = meta.tuning_vfe_data_size;
8356 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8357 written_len += write(file_fd, data, total_size);
8358 total_size = meta.tuning_cpp_data_size;
8359 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8360 written_len += write(file_fd, data, total_size);
8361 total_size = meta.tuning_cac_data_size;
8362 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8363 written_len += write(file_fd, data, total_size);
8364 close(file_fd);
8365 }else {
8366 LOGE("fail to open file for metadata dumping");
8367 }
8368 }
8369}
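
/* Offline parsing sketch (illustrative only, not used by the HAL): the dump
 * written above is a six-word header followed by the four data blobs. The
 * function name exampleReadTuningHeader and the 'path' argument are
 * hypothetical; the field order matches the write sequence above.
 */
static bool exampleReadTuningHeader(const char *path)
{
    uint32_t hdr[6]; // version, sensor, vfe, cpp, cac, mod3 sizes
    FILE *fp = fopen(path, "rb");
    if (fp == NULL) {
        return false;
    }
    bool ok = (fread(hdr, sizeof(uint32_t), 6, fp) == 6);
    if (ok) {
        // Data blobs follow in the same order as their sizes:
        // sensor, VFE, CPP and CAC, each hdr[i] bytes long (i = 1..4).
        LOGD("version %u sensor %u vfe %u cpp %u cac %u",
                hdr[0], hdr[1], hdr[2], hdr[3], hdr[4]);
    }
    fclose(fp);
    return ok;
}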
8370
8371/*===========================================================================
8372 * FUNCTION : cleanAndSortStreamInfo
8373 *
8374 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8375 * and sort them such that raw streams are at the end of the list.
8376 * This is a workaround for a camera daemon constraint.
8377 *
8378 * PARAMETERS : None
8379 *
8380 *==========================================================================*/
8381void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8382{
8383 List<stream_info_t *> newStreamInfo;
8384
8385 /*clean up invalid streams*/
8386 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8387 it != mStreamInfo.end();) {
8388 if(((*it)->status) == INVALID){
8389 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8390 delete channel;
8391 free(*it);
8392 it = mStreamInfo.erase(it);
8393 } else {
8394 it++;
8395 }
8396 }
8397
8398 // Move preview/video/callback/snapshot streams into newList
8399 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8400 it != mStreamInfo.end();) {
8401 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8402 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8403 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8404 newStreamInfo.push_back(*it);
8405 it = mStreamInfo.erase(it);
8406 } else
8407 it++;
8408 }
8409 // Move raw streams into newList
8410 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8411 it != mStreamInfo.end();) {
8412 newStreamInfo.push_back(*it);
8413 it = mStreamInfo.erase(it);
8414 }
8415
8416 mStreamInfo = newStreamInfo;
8417}
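
/* Ordering sketch (illustrative only): the effect of the two passes above is a
 * stable partition that keeps non-raw streams first and raw streams last. A
 * minimal standalone equivalent over pixel formats, assuming <algorithm> and
 * <vector> are available, could look like this; the helper name is hypothetical.
 */
static void examplePartitionRawLast(std::vector<int> &formats)
{
    // Keep the relative order of the non-raw entries and push RAW formats to
    // the end, matching the daemon constraint worked around above.
    std::stable_partition(formats.begin(), formats.end(), [](int fmt) {
        return (fmt != HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
                (fmt != HAL_PIXEL_FORMAT_RAW10) &&
                (fmt != HAL_PIXEL_FORMAT_RAW16);
    });
}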
8418
8419/*===========================================================================
8420 * FUNCTION : extractJpegMetadata
8421 *
8422 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8423 * JPEG metadata is cached in HAL, and return as part of capture
8424 * result when metadata is returned from camera daemon.
8425 *
8426 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8427 * @request: capture request
8428 *
8429 *==========================================================================*/
8430void QCamera3HardwareInterface::extractJpegMetadata(
8431 CameraMetadata& jpegMetadata,
8432 const camera3_capture_request_t *request)
8433{
8434 CameraMetadata frame_settings;
8435 frame_settings = request->settings;
8436
8437 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8438 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8439 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8440 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8441
8442 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8443 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8444 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8445 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8446
8447 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8448 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8449 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8450 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8451
8452 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8453 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8454 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8455 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8456
8457 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8458 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8459 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8460 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8461
8462 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8463 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8464 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8465 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8466
8467 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8468 int32_t thumbnail_size[2];
8469 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8470 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8471 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8472 int32_t orientation =
8473 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008474 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008475 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8476 int32_t temp;
8477 temp = thumbnail_size[0];
8478 thumbnail_size[0] = thumbnail_size[1];
8479 thumbnail_size[1] = temp;
8480 }
8481 }
8482 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8483 thumbnail_size,
8484 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8485 }
8486
8487}
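
/* Thumbnail orientation sketch (illustrative only): the swap above applies when
 * the JPEG is rotated in hardware (no EXIF rotation needed) and the requested
 * orientation is 90 or 270 degrees. The helper name exampleOrientThumbnail is
 * hypothetical.
 */
static void exampleOrientThumbnail(int32_t thumbnail[2], int32_t orientation,
        bool needExifRotation)
{
    if (!needExifRotation && ((orientation == 90) || (orientation == 270))) {
        // Width and height trade places because the encoded image itself is
        // rotated; an EXIF-only rotation would leave the buffer untouched.
        int32_t temp = thumbnail[0];
        thumbnail[0] = thumbnail[1];
        thumbnail[1] = temp;
    }
}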
8488
8489/*===========================================================================
8490 * FUNCTION : convertToRegions
8491 *
8492 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8493 *
8494 * PARAMETERS :
8495 * @rect : cam_rect_t struct to convert
8496 * @region : int32_t destination array
8497 * @weight : if we are converting from cam_area_t, weight is valid
8498 * else weight = -1
8499 *
8500 *==========================================================================*/
8501void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8502 int32_t *region, int weight)
8503{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008504 region[FACE_LEFT] = rect.left;
8505 region[FACE_TOP] = rect.top;
8506 region[FACE_RIGHT] = rect.left + rect.width;
8507 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008508 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008509 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008510 }
8511}
8512
8513/*===========================================================================
8514 * FUNCTION : convertFromRegions
8515 *
8516 * DESCRIPTION: helper method to convert a region tuple from frame settings
8517 * into cam_area_t
8518 *
8519 * PARAMETERS :
8520 * @roi : cam_area_t destination to fill
8521 * @frame_settings : capture request settings containing the region tag
8522 * @tag : metadata tag holding [x_min, y_min, x_max, y_max, weight]
8523 *
8524 *==========================================================================*/
8525void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008526 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008527{
Thierry Strudel3d639192016-09-09 11:52:26 -07008528 int32_t x_min = frame_settings.find(tag).data.i32[0];
8529 int32_t y_min = frame_settings.find(tag).data.i32[1];
8530 int32_t x_max = frame_settings.find(tag).data.i32[2];
8531 int32_t y_max = frame_settings.find(tag).data.i32[3];
8532 roi.weight = frame_settings.find(tag).data.i32[4];
8533 roi.rect.left = x_min;
8534 roi.rect.top = y_min;
8535 roi.rect.width = x_max - x_min;
8536 roi.rect.height = y_max - y_min;
8537}
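
/* Region layout sketch (illustrative only): the framework passes regions as a
 * five element tuple [x_min, y_min, x_max, y_max, weight], while the backend
 * uses cam_area_t with a left/top/width/height rectangle. The helper name
 * exampleTupleToArea is hypothetical; the arithmetic mirrors the two
 * converters above.
 */
static void exampleTupleToArea(const int32_t tuple[5], cam_area_t &roi)
{
    roi.rect.left   = tuple[0];
    roi.rect.top    = tuple[1];
    roi.rect.width  = tuple[2] - tuple[0]; // x_max - x_min
    roi.rect.height = tuple[3] - tuple[1]; // y_max - y_min
    roi.weight      = tuple[4];
    // e.g. [100, 100, 500, 500, 1] becomes a 400x400 rectangle at (100, 100)
    // with weight 1.
}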
8538
8539/*===========================================================================
8540 * FUNCTION : resetIfNeededROI
8541 *
8542 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8543 * crop region
8544 *
8545 * PARAMETERS :
8546 * @roi : cam_area_t struct to resize
8547 * @scalerCropRegion : cam_crop_region_t region to compare against
8548 *
8549 *
8550 *==========================================================================*/
8551bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8552 const cam_crop_region_t* scalerCropRegion)
8553{
8554 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8555 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8556 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8557 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8558
8559 /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8560 * Without this check, the validation below (whether the ROI lies inside the
8561 * scaler crop region) would fail, the ROI would not be reset, and the
8562 * algorithm would keep using a stale ROI window.
8563 */
8564 if (roi->weight == 0) {
8565 return true;
8566 }
8567
8568 if ((roi_x_max < scalerCropRegion->left) ||
8569 // right edge of roi window is left of scaler crop's left edge
8570 (roi_y_max < scalerCropRegion->top) ||
8571 // bottom edge of roi window is above scaler crop's top edge
8572 (roi->rect.left > crop_x_max) ||
8573 // left edge of roi window is beyond (right of) scaler crop's right edge
8574 (roi->rect.top > crop_y_max)){
8575 // top edge of roi window is below scaler crop's bottom edge
8576 return false;
8577 }
8578 if (roi->rect.left < scalerCropRegion->left) {
8579 roi->rect.left = scalerCropRegion->left;
8580 }
8581 if (roi->rect.top < scalerCropRegion->top) {
8582 roi->rect.top = scalerCropRegion->top;
8583 }
8584 if (roi_x_max > crop_x_max) {
8585 roi_x_max = crop_x_max;
8586 }
8587 if (roi_y_max > crop_y_max) {
8588 roi_y_max = crop_y_max;
8589 }
8590 roi->rect.width = roi_x_max - roi->rect.left;
8591 roi->rect.height = roi_y_max - roi->rect.top;
8592 return true;
8593}
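
/* Worked example for resetIfNeededROI(): with a scaler crop region of
 * (left 200, top 200, width 2000, height 1500) and an ROI of
 * (left 100, top 100, width 400, height 400, weight 1), the ROI overlaps the
 * crop region, so it is clamped to (left 200, top 200, width 300, height 300)
 * and the function returns true. A weight of 0 returns true immediately (ROI
 * disabled); an ROI lying entirely outside the crop region returns false and
 * is left untouched.
 */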
8594
8595/*===========================================================================
8596 * FUNCTION : convertLandmarks
8597 *
8598 * DESCRIPTION: helper method to extract the landmarks from face detection info
8599 *
8600 * PARAMETERS :
8601 * @landmark_data : input landmark data to be converted
8602 * @landmarks : int32_t destination array
8603 *
8604 *
8605 *==========================================================================*/
8606void QCamera3HardwareInterface::convertLandmarks(
8607 cam_face_landmarks_info_t landmark_data,
8608 int32_t *landmarks)
8609{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008610 if (landmark_data.is_left_eye_valid) {
8611 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8612 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8613 } else {
8614 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8615 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8616 }
8617
8618 if (landmark_data.is_right_eye_valid) {
8619 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8620 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8621 } else {
8622 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8623 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8624 }
8625
8626 if (landmark_data.is_mouth_valid) {
8627 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8628 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8629 } else {
8630 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8631 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8632 }
8633}
8634
8635/*===========================================================================
8636 * FUNCTION : setInvalidLandmarks
8637 *
8638 * DESCRIPTION: helper method to set invalid landmarks
8639 *
8640 * PARAMETERS :
8641 * @landmarks : int32_t destination array
8642 *
8643 *
8644 *==========================================================================*/
8645void QCamera3HardwareInterface::setInvalidLandmarks(
8646 int32_t *landmarks)
8647{
8648 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8649 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8650 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8651 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8652 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8653 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008654}
8655
8656#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008657
8658/*===========================================================================
8659 * FUNCTION : getCapabilities
8660 *
8661 * DESCRIPTION: query camera capability from back-end
8662 *
8663 * PARAMETERS :
8664 * @ops : mm-interface ops structure
8665 * @cam_handle : camera handle for which we need capability
8666 *
8667 * RETURN : ptr type of capability structure
8668 * capability for success
8669 * NULL for failure
8670 *==========================================================================*/
8671cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8672 uint32_t cam_handle)
8673{
8674 int rc = NO_ERROR;
8675 QCamera3HeapMemory *capabilityHeap = NULL;
8676 cam_capability_t *cap_ptr = NULL;
8677
8678 if (ops == NULL) {
8679 LOGE("Invalid arguments");
8680 return NULL;
8681 }
8682
8683 capabilityHeap = new QCamera3HeapMemory(1);
8684 if (capabilityHeap == NULL) {
8685 LOGE("creation of capabilityHeap failed");
8686 return NULL;
8687 }
8688
8689 /* Allocate memory for capability buffer */
8690 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8691 if(rc != OK) {
8692 LOGE("No memory for cappability");
8693 goto allocate_failed;
8694 }
8695
8696 /* Map memory for capability buffer */
8697 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8698
8699 rc = ops->map_buf(cam_handle,
8700 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8701 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8702 if(rc < 0) {
8703 LOGE("failed to map capability buffer");
8704 rc = FAILED_TRANSACTION;
8705 goto map_failed;
8706 }
8707
8708 /* Query Capability */
8709 rc = ops->query_capability(cam_handle);
8710 if(rc < 0) {
8711 LOGE("failed to query capability");
8712 rc = FAILED_TRANSACTION;
8713 goto query_failed;
8714 }
8715
8716 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8717 if (cap_ptr == NULL) {
8718 LOGE("out of memory");
8719 rc = NO_MEMORY;
8720 goto query_failed;
8721 }
8722
8723 memset(cap_ptr, 0, sizeof(cam_capability_t));
8724 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8725
8726 int index;
8727 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8728 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8729 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8730 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8731 }
8732
8733query_failed:
8734 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8735map_failed:
8736 capabilityHeap->deallocate();
8737allocate_failed:
8738 delete capabilityHeap;
8739
8740 if (rc != NO_ERROR) {
8741 return NULL;
8742 } else {
8743 return cap_ptr;
8744 }
8745}
8746
Thierry Strudel3d639192016-09-09 11:52:26 -07008747/*===========================================================================
8748 * FUNCTION : initCapabilities
8749 *
8750 * DESCRIPTION: initialize camera capabilities in static data struct
8751 *
8752 * PARAMETERS :
8753 * @cameraId : camera Id
8754 *
8755 * RETURN : int32_t type of status
8756 * NO_ERROR -- success
8757 * none-zero failure code
8758 *==========================================================================*/
8759int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8760{
8761 int rc = 0;
8762 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008763 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008764
8765 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8766 if (rc) {
8767 LOGE("camera_open failed. rc = %d", rc);
8768 goto open_failed;
8769 }
8770 if (!cameraHandle) {
8771 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8772 goto open_failed;
8773 }
8774
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008775 handle = get_main_camera_handle(cameraHandle->camera_handle);
8776 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8777 if (gCamCapability[cameraId] == NULL) {
8778 rc = FAILED_TRANSACTION;
8779 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008780 }
8781
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008782 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008783 if (is_dual_camera_by_idx(cameraId)) {
8784 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8785 gCamCapability[cameraId]->aux_cam_cap =
8786 getCapabilities(cameraHandle->ops, handle);
8787 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8788 rc = FAILED_TRANSACTION;
8789 free(gCamCapability[cameraId]);
8790 goto failed_op;
8791 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008792
8793 // Copy the main camera capability to main_cam_cap struct
8794 gCamCapability[cameraId]->main_cam_cap =
8795 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8796 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8797 LOGE("out of memory");
8798 rc = NO_MEMORY;
8799 goto failed_op;
8800 }
8801 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8802 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008803 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008804failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008805 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8806 cameraHandle = NULL;
8807open_failed:
8808 return rc;
8809}
8810
8811/*==========================================================================
8812 * FUNCTION : get3AVersion
8813 *
8814 * DESCRIPTION: get the Q3A S/W version
8815 *
8816 * PARAMETERS :
8817 * @sw_version: Reference of Q3A structure which will hold version info upon
8818 * return
8819 *
8820 * RETURN : None
8821 *
8822 *==========================================================================*/
8823void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8824{
8825 if(gCamCapability[mCameraId])
8826 sw_version = gCamCapability[mCameraId]->q3a_version;
8827 else
8828 LOGE("Capability structure NULL!");
8829}
8830
8831
8832/*===========================================================================
8833 * FUNCTION : initParameters
8834 *
8835 * DESCRIPTION: initialize camera parameters
8836 *
8837 * PARAMETERS :
8838 *
8839 * RETURN : int32_t type of status
8840 * NO_ERROR -- success
8841 * none-zero failure code
8842 *==========================================================================*/
8843int QCamera3HardwareInterface::initParameters()
8844{
8845 int rc = 0;
8846
8847 //Allocate Set Param Buffer
8848 mParamHeap = new QCamera3HeapMemory(1);
8849 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8850 if(rc != OK) {
8851 rc = NO_MEMORY;
8852 LOGE("Failed to allocate SETPARM Heap memory");
8853 delete mParamHeap;
8854 mParamHeap = NULL;
8855 return rc;
8856 }
8857
8858 //Map memory for parameters buffer
8859 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8860 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8861 mParamHeap->getFd(0),
8862 sizeof(metadata_buffer_t),
8863 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8864 if(rc < 0) {
8865 LOGE("failed to map SETPARM buffer");
8866 rc = FAILED_TRANSACTION;
8867 mParamHeap->deallocate();
8868 delete mParamHeap;
8869 mParamHeap = NULL;
8870 return rc;
8871 }
8872
8873 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8874
8875 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8876 return rc;
8877}
8878
8879/*===========================================================================
8880 * FUNCTION : deinitParameters
8881 *
8882 * DESCRIPTION: de-initialize camera parameters
8883 *
8884 * PARAMETERS :
8885 *
8886 * RETURN : NONE
8887 *==========================================================================*/
8888void QCamera3HardwareInterface::deinitParameters()
8889{
8890 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8891 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8892
8893 mParamHeap->deallocate();
8894 delete mParamHeap;
8895 mParamHeap = NULL;
8896
8897 mParameters = NULL;
8898
8899 free(mPrevParameters);
8900 mPrevParameters = NULL;
8901}
8902
8903/*===========================================================================
8904 * FUNCTION : calcMaxJpegSize
8905 *
8906 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8907 *
8908 * PARAMETERS :
8909 *
8910 * RETURN : max_jpeg_size
8911 *==========================================================================*/
8912size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8913{
8914 size_t max_jpeg_size = 0;
8915 size_t temp_width, temp_height;
8916 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8917 MAX_SIZES_CNT);
8918 for (size_t i = 0; i < count; i++) {
8919 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8920 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8921 if (temp_width * temp_height > max_jpeg_size ) {
8922 max_jpeg_size = temp_width * temp_height;
8923 }
8924 }
8925 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8926 return max_jpeg_size;
8927}
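
// Worked example: for a hypothetical 4000x3000 (12 MP) maximum picture size,
// the returned buffer size is 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t),
// i.e. roughly an 18 MB worst-case allocation plus space for the trailing
// camera3_jpeg_blob_t transport header.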
8928
8929/*===========================================================================
8930 * FUNCTION : getMaxRawSize
8931 *
8932 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8933 *
8934 * PARAMETERS :
8935 *
8936 * RETURN : Largest supported Raw Dimension
8937 *==========================================================================*/
8938cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8939{
8940 int max_width = 0;
8941 cam_dimension_t maxRawSize;
8942
8943 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8944 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8945 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8946 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8947 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8948 }
8949 }
8950 return maxRawSize;
8951}
8952
8953
8954/*===========================================================================
8955 * FUNCTION : calcMaxJpegDim
8956 *
8957 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8958 *
8959 * PARAMETERS :
8960 *
8961 * RETURN : max_jpeg_dim
8962 *==========================================================================*/
8963cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8964{
8965 cam_dimension_t max_jpeg_dim;
8966 cam_dimension_t curr_jpeg_dim;
8967 max_jpeg_dim.width = 0;
8968 max_jpeg_dim.height = 0;
8969 curr_jpeg_dim.width = 0;
8970 curr_jpeg_dim.height = 0;
8971 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8972 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8973 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8974 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8975 max_jpeg_dim.width * max_jpeg_dim.height ) {
8976 max_jpeg_dim.width = curr_jpeg_dim.width;
8977 max_jpeg_dim.height = curr_jpeg_dim.height;
8978 }
8979 }
8980 return max_jpeg_dim;
8981}
8982
8983/*===========================================================================
8984 * FUNCTION : addStreamConfig
8985 *
8986 * DESCRIPTION: adds the stream configuration to the array
8987 *
8988 * PARAMETERS :
8989 * @available_stream_configs : pointer to stream configuration array
8990 * @scalar_format : scalar format
8991 * @dim : configuration dimension
8992 * @config_type : input or output configuration type
8993 *
8994 * RETURN : NONE
8995 *==========================================================================*/
8996void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8997 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8998{
8999 available_stream_configs.add(scalar_format);
9000 available_stream_configs.add(dim.width);
9001 available_stream_configs.add(dim.height);
9002 available_stream_configs.add(config_type);
9003}
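
// The stream configuration table is therefore a flat list of
// (format, width, height, input/output type) quadruples, e.g.
// { ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, 1920, 1080,
//   ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT } describes one
// 1080p YUV output configuration.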
9004
9005/*===========================================================================
9006 * FUNCTION : supportBurstCapture
9007 *
9008 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9009 *
9010 * PARAMETERS :
9011 * @cameraId : camera Id
9012 *
9013 * RETURN : true if camera supports BURST_CAPTURE
9014 * false otherwise
9015 *==========================================================================*/
9016bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9017{
9018 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9019 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9020 const int32_t highResWidth = 3264;
9021 const int32_t highResHeight = 2448;
9022
9023 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9024 // Maximum resolution images cannot be captured at >= 10fps
9025 // -> not supporting BURST_CAPTURE
9026 return false;
9027 }
9028
9029 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9030 // Maximum resolution images can be captured at >= 20fps
9031 // --> supporting BURST_CAPTURE
9032 return true;
9033 }
9034
9035 // Find the smallest highRes resolution, or largest resolution if there is none
9036 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9037 MAX_SIZES_CNT);
9038 size_t highRes = 0;
9039 while ((highRes + 1 < totalCnt) &&
9040 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9041 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9042 highResWidth * highResHeight)) {
9043 highRes++;
9044 }
9045 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9046 return true;
9047 } else {
9048 return false;
9049 }
9050}
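
/* Worked example: a sensor whose full-resolution minimum frame duration is
 * 40 ms (25 fps) is within the 50 ms bound and supports BURST_CAPTURE outright.
 * One at 80 ms falls between the two bounds, so the check falls through to the
 * smallest >= 3264x2448 (about 8 MP) size and requires that size to sustain
 * 20 fps. Anything slower than 100 ms (10 fps) at full resolution never
 * qualifies.
 */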
9051
9052/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009053 * FUNCTION : getPDStatIndex
9054 *
9055 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9056 *
9057 * PARAMETERS :
9058 * @caps : camera capabilities
9059 *
9060 * RETURN : int32_t type
9061 * non-negative - on success
9062 * -1 - on failure
9063 *==========================================================================*/
9064int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9065 if (nullptr == caps) {
9066 return -1;
9067 }
9068
9069 uint32_t metaRawCount = caps->meta_raw_channel_count;
9070 int32_t ret = -1;
9071 for (size_t i = 0; i < metaRawCount; i++) {
9072 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9073 ret = i;
9074 break;
9075 }
9076 }
9077
9078 return ret;
9079}
9080
9081/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009082 * FUNCTION : initStaticMetadata
9083 *
9084 * DESCRIPTION: initialize the static metadata
9085 *
9086 * PARAMETERS :
9087 * @cameraId : camera Id
9088 *
9089 * RETURN : int32_t type of status
9090 * 0 -- success
9091 * non-zero failure code
9092 *==========================================================================*/
9093int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9094{
9095 int rc = 0;
9096 CameraMetadata staticInfo;
9097 size_t count = 0;
9098 bool limitedDevice = false;
9099 char prop[PROPERTY_VALUE_MAX];
9100 bool supportBurst = false;
9101
9102 supportBurst = supportBurstCapture(cameraId);
9103
9104 /* If the sensor is a YUV or mono sensor (no raw support), if per-frame control
9105 * is not guaranteed, or if burst capture is not supported (min fps of the max
9106 * resolution is too low), the device is advertised as LIMITED. */
9107 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9108 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9109 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9110 !supportBurst;
9111
9112 uint8_t supportedHwLvl = limitedDevice ?
9113 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009114#ifndef USE_HAL_3_3
9115 // LEVEL_3 - This device will support level 3.
9116 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9117#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009118 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009119#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009120
9121 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9122 &supportedHwLvl, 1);
9123
9124 bool facingBack = false;
9125 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9126 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9127 facingBack = true;
9128 }
9129 /*HAL 3 only*/
9130 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9131 &gCamCapability[cameraId]->min_focus_distance, 1);
9132
9133 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9134 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9135
9136 /*should be using focal lengths but sensor doesn't provide that info now*/
9137 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9138 &gCamCapability[cameraId]->focal_length,
9139 1);
9140
9141 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9142 gCamCapability[cameraId]->apertures,
9143 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9144
9145 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9146 gCamCapability[cameraId]->filter_densities,
9147 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9148
9149
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009150 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9151 size_t mode_count =
9152 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9153 for (size_t i = 0; i < mode_count; i++) {
9154 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9155 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009156 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009157 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009158
9159 int32_t lens_shading_map_size[] = {
9160 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9161 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9162 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9163 lens_shading_map_size,
9164 sizeof(lens_shading_map_size)/sizeof(int32_t));
9165
9166 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9167 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9168
9169 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9170 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9171
9172 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9173 &gCamCapability[cameraId]->max_frame_duration, 1);
9174
9175 camera_metadata_rational baseGainFactor = {
9176 gCamCapability[cameraId]->base_gain_factor.numerator,
9177 gCamCapability[cameraId]->base_gain_factor.denominator};
9178 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9179 &baseGainFactor, 1);
9180
9181 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9182 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9183
9184 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9185 gCamCapability[cameraId]->pixel_array_size.height};
9186 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9187 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9188
9189 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9190 gCamCapability[cameraId]->active_array_size.top,
9191 gCamCapability[cameraId]->active_array_size.width,
9192 gCamCapability[cameraId]->active_array_size.height};
9193 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9194 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9195
9196 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9197 &gCamCapability[cameraId]->white_level, 1);
9198
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009199 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9200 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9201 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009202 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009203 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009204
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009205#ifndef USE_HAL_3_3
9206 bool hasBlackRegions = false;
9207 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9208 LOGW("black_region_count: %d is bounded to %d",
9209 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9210 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9211 }
9212 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9213 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9214 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9215 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9216 }
9217 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9218 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9219 hasBlackRegions = true;
9220 }
9221#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009222 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9223 &gCamCapability[cameraId]->flash_charge_duration, 1);
9224
9225 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9226 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9227
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009228 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9229 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9230 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9232 &timestampSource, 1);
9233
Thierry Strudel54dc9782017-02-15 12:12:10 -08009234 //update histogram vendor data
9235 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009236 &gCamCapability[cameraId]->histogram_size, 1);
9237
Thierry Strudel54dc9782017-02-15 12:12:10 -08009238 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009239 &gCamCapability[cameraId]->max_histogram_count, 1);
9240
Shuzhen Wang14415f52016-11-16 18:26:18 -08009241 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9242 //so that the app can request fewer bins than the maximum supported.
9243 std::vector<int32_t> histBins;
9244 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9245 histBins.push_back(maxHistBins);
9246 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9247 (maxHistBins & 0x1) == 0) {
9248 histBins.push_back(maxHistBins >> 1);
9249 maxHistBins >>= 1;
9250 }
9251 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9252 histBins.data(), histBins.size());
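    // Illustrative example (hypothetical values: max_histogram_count == 256,
    // MIN_CAM_HISTOGRAM_STATS_SIZE == 32): the advertised list would be
    // {256, 128, 64, 32}; halving stops once the next value would drop below
    // the minimum or the current value is odd.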
9253
Thierry Strudel3d639192016-09-09 11:52:26 -07009254 int32_t sharpness_map_size[] = {
9255 gCamCapability[cameraId]->sharpness_map_size.width,
9256 gCamCapability[cameraId]->sharpness_map_size.height};
9257
9258 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9259 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9260
9261 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9262 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9263
Emilian Peev0f3c3162017-03-15 12:57:46 +00009264 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9265 if (0 <= indexPD) {
9266 // Advertise PD stats data as part of the Depth capabilities
9267 int32_t depthWidth =
9268 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9269 int32_t depthHeight =
9270 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9271 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9272 assert(0 < depthSamplesCount);
9273 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9274 &depthSamplesCount, 1);
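        // Sketch of the arithmetic above: the PD plane holds
        // depthWidth * depthHeight pixels at 2 bytes each, and each exported
        // depth sample presumably occupies 16 bytes. For a hypothetical
        // 800x600 plane that gives (800 * 600 * 2) / 16 = 60000 samples.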
9275
9276 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9277 depthHeight,
9278 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9279 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9280 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9281 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9282 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9283
9284 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9285 depthHeight, 33333333,
9286 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9287 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9288 depthMinDuration,
9289 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9290
9291 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9292 depthHeight, 0,
9293 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9294 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9295 depthStallDuration,
9296 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9297
9298 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9299 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9300 }
9301
Thierry Strudel3d639192016-09-09 11:52:26 -07009302 int32_t scalar_formats[] = {
9303 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9304 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9305 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9306 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9307 HAL_PIXEL_FORMAT_RAW10,
9308 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009309 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9310 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9311 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009312
9313 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9314 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9315 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9316 count, MAX_SIZES_CNT, available_processed_sizes);
9317 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9318 available_processed_sizes, count * 2);
9319
9320 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9321 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9322 makeTable(gCamCapability[cameraId]->raw_dim,
9323 count, MAX_SIZES_CNT, available_raw_sizes);
9324 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9325 available_raw_sizes, count * 2);
9326
9327 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9328 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9329 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9330 count, MAX_SIZES_CNT, available_fps_ranges);
9331 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9332 available_fps_ranges, count * 2);
9333
9334 camera_metadata_rational exposureCompensationStep = {
9335 gCamCapability[cameraId]->exp_compensation_step.numerator,
9336 gCamCapability[cameraId]->exp_compensation_step.denominator};
9337 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9338 &exposureCompensationStep, 1);
9339
9340 Vector<uint8_t> availableVstabModes;
9341 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9342 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009343 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009344 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009345 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009346 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009347 count = IS_TYPE_MAX;
9348 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9349 for (size_t i = 0; i < count; i++) {
9350 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9351 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9352 eisSupported = true;
9353 break;
9354 }
9355 }
9356 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009357 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9358 }
9359 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9360 availableVstabModes.array(), availableVstabModes.size());
9361
9362 /*HAL 1 and HAL 3 common*/
9363 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9364 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9365 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009366 // Cap the max zoom to the max preferred value
9367 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009368 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9369 &maxZoom, 1);
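    // Worked example with hypothetical numbers (assuming MAX_PREFERRED_ZOOM_RATIO
    // is 4.0f): a zoom_ratio_tbl whose last entry is 600 gives
    // maxZoomStep / minZoomStep = 600 / 100 = 6, which is then capped to 4.0
    // before being advertised as the maximum digital zoom.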
9370
9371 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9372 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9373
9374 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9375 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9376 max3aRegions[2] = 0; /* AF not supported */
9377 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9378 max3aRegions, 3);
9379
9380 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9381 memset(prop, 0, sizeof(prop));
9382 property_get("persist.camera.facedetect", prop, "1");
9383 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9384 LOGD("Support face detection mode: %d",
9385 supportedFaceDetectMode);
9386
9387 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009388 /* supported mode should be OFF if the max number of faces is 0 */
9389 if (maxFaces <= 0) {
9390 supportedFaceDetectMode = 0;
9391 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009392 Vector<uint8_t> availableFaceDetectModes;
9393 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9394 if (supportedFaceDetectMode == 1) {
9395 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9396 } else if (supportedFaceDetectMode == 2) {
9397 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9398 } else if (supportedFaceDetectMode == 3) {
9399 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9400 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9401 } else {
9402 maxFaces = 0;
9403 }
9404 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9405 availableFaceDetectModes.array(),
9406 availableFaceDetectModes.size());
9407 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9408 (int32_t *)&maxFaces, 1);
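    // Behaviour sketch: with persist.camera.facedetect left at its default of "1"
    // and a positive max_num_roi, the HAL advertises OFF and SIMPLE face detect
    // modes; "3" would advertise OFF, SIMPLE and FULL, while "0" (or a zero
    // max_num_roi) leaves only OFF and forces the reported max face count to 0.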
Thierry Strudel54dc9782017-02-15 12:12:10 -08009409 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9410 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9411 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009412
9413 int32_t exposureCompensationRange[] = {
9414 gCamCapability[cameraId]->exposure_compensation_min,
9415 gCamCapability[cameraId]->exposure_compensation_max};
9416 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9417 exposureCompensationRange,
9418 sizeof(exposureCompensationRange)/sizeof(int32_t));
9419
9420 uint8_t lensFacing = (facingBack) ?
9421 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9422 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9423
9424 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9425 available_thumbnail_sizes,
9426 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9427
9428 /* all supported picture sizes are combined under this tag */
9429 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9430 /*android.scaler.availableStreamConfigurations*/
9431 Vector<int32_t> available_stream_configs;
9432 cam_dimension_t active_array_dim;
9433 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9434 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009435
9436 /*advertise the list of supported input dimensions based on the property below.
9437 By default, all sizes up to 5MP will be advertised.
9438 Note that the setprop resolution format should be WxH,
9439 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9440 To list all supported sizes, the setprop needs to be set to "0x0" */
9441 cam_dimension_t minInputSize = {2592,1944}; //5MP
9442 memset(prop, 0, sizeof(prop));
9443 property_get("persist.camera.input.minsize", prop, "2592x1944");
9444 if (strlen(prop) > 0) {
9445 char *saveptr = NULL;
9446 char *token = strtok_r(prop, "x", &saveptr);
9447 if (token != NULL) {
9448 minInputSize.width = atoi(token);
9449 }
9450 token = strtok_r(NULL, "x", &saveptr);
9451 if (token != NULL) {
9452 minInputSize.height = atoi(token);
9453 }
9454 }
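    // Parsing sketch: a property value of "1280x720" yields
    // minInputSize = {1280, 720}; the special value "0x0" parses to {0, 0},
    // so the minimum-size comparison in the stream configuration loop below
    // never filters out any candidate input size.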
9455
Thierry Strudel3d639192016-09-09 11:52:26 -07009456 /* Add input/output stream configurations for each scalar format */
9457 for (size_t j = 0; j < scalar_formats_count; j++) {
9458 switch (scalar_formats[j]) {
9459 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9460 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9461 case HAL_PIXEL_FORMAT_RAW10:
9462 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9463 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9464 addStreamConfig(available_stream_configs, scalar_formats[j],
9465 gCamCapability[cameraId]->raw_dim[i],
9466 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9467 }
9468 break;
9469 case HAL_PIXEL_FORMAT_BLOB:
9470 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9471 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9472 addStreamConfig(available_stream_configs, scalar_formats[j],
9473 gCamCapability[cameraId]->picture_sizes_tbl[i],
9474 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9475 }
9476 break;
9477 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9478 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9479 default:
9480 cam_dimension_t largest_picture_size;
9481 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9482 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9483 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9484 addStreamConfig(available_stream_configs, scalar_formats[j],
9485 gCamCapability[cameraId]->picture_sizes_tbl[i],
9486 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009487 /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009488 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9489 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009490 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9491 >= minInputSize.width) || (gCamCapability[cameraId]->
9492 picture_sizes_tbl[i].height >= minInputSize.height)) {
9493 addStreamConfig(available_stream_configs, scalar_formats[j],
9494 gCamCapability[cameraId]->picture_sizes_tbl[i],
9495 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9496 }
9497 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009498 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009499
Thierry Strudel3d639192016-09-09 11:52:26 -07009500 break;
9501 }
9502 }
9503
9504 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9505 available_stream_configs.array(), available_stream_configs.size());
9506 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9507 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9508
9509 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9510 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9511
9512 /* android.scaler.availableMinFrameDurations */
9513 Vector<int64_t> available_min_durations;
9514 for (size_t j = 0; j < scalar_formats_count; j++) {
9515 switch (scalar_formats[j]) {
9516 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9517 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9518 case HAL_PIXEL_FORMAT_RAW10:
9519 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9520 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9521 available_min_durations.add(scalar_formats[j]);
9522 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9523 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9524 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9525 }
9526 break;
9527 default:
9528 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9529 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9530 available_min_durations.add(scalar_formats[j]);
9531 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9532 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9533 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9534 }
9535 break;
9536 }
9537 }
9538 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9539 available_min_durations.array(), available_min_durations.size());
9540
9541 Vector<int32_t> available_hfr_configs;
9542 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9543 int32_t fps = 0;
9544 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9545 case CAM_HFR_MODE_60FPS:
9546 fps = 60;
9547 break;
9548 case CAM_HFR_MODE_90FPS:
9549 fps = 90;
9550 break;
9551 case CAM_HFR_MODE_120FPS:
9552 fps = 120;
9553 break;
9554 case CAM_HFR_MODE_150FPS:
9555 fps = 150;
9556 break;
9557 case CAM_HFR_MODE_180FPS:
9558 fps = 180;
9559 break;
9560 case CAM_HFR_MODE_210FPS:
9561 fps = 210;
9562 break;
9563 case CAM_HFR_MODE_240FPS:
9564 fps = 240;
9565 break;
9566 case CAM_HFR_MODE_480FPS:
9567 fps = 480;
9568 break;
9569 case CAM_HFR_MODE_OFF:
9570 case CAM_HFR_MODE_MAX:
9571 default:
9572 break;
9573 }
9574
9575 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9576 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9577 /* For each HFR frame rate, need to advertise one variable fps range
9578 * and one fixed fps range per dimension. E.g. for 120 FPS, advertise [30, 120]
9579 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9580 * set by the app. When video recording is started, [120, 120] is
9581 * set. This way sensor configuration does not change when recording
9582 * is started */
9583
9584 /* (width, height, fps_min, fps_max, batch_size_max) */
9585 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9586 j < MAX_SIZES_CNT; j++) {
9587 available_hfr_configs.add(
9588 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9589 available_hfr_configs.add(
9590 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9591 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9592 available_hfr_configs.add(fps);
9593 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9594
9595 /* (width, height, fps_min, fps_max, batch_size_max) */
9596 available_hfr_configs.add(
9597 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9598 available_hfr_configs.add(
9599 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9600 available_hfr_configs.add(fps);
9601 available_hfr_configs.add(fps);
9602 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9603 }
9604 }
9605 }
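    // Example of the tuples generated above (assuming PREVIEW_FPS_FOR_HFR is 30):
    // a 1920x1080 entry with CAM_HFR_MODE_120FPS contributes
    // (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4), i.e. one
    // variable-rate and one fixed-rate range, each with batch size 120 / 30 = 4.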
9606 //Advertise HFR capability only if the property is set
9607 memset(prop, 0, sizeof(prop));
9608 property_get("persist.camera.hal3hfr.enable", prop, "1");
9609 uint8_t hfrEnable = (uint8_t)atoi(prop);
9610
9611 if(hfrEnable && available_hfr_configs.array()) {
9612 staticInfo.update(
9613 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9614 available_hfr_configs.array(), available_hfr_configs.size());
9615 }
9616
9617 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9618 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9619 &max_jpeg_size, 1);
9620
9621 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9622 size_t size = 0;
9623 count = CAM_EFFECT_MODE_MAX;
9624 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9625 for (size_t i = 0; i < count; i++) {
9626 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9627 gCamCapability[cameraId]->supported_effects[i]);
9628 if (NAME_NOT_FOUND != val) {
9629 avail_effects[size] = (uint8_t)val;
9630 size++;
9631 }
9632 }
9633 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9634 avail_effects,
9635 size);
9636
9637 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9638 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9639 size_t supported_scene_modes_cnt = 0;
9640 count = CAM_SCENE_MODE_MAX;
9641 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9642 for (size_t i = 0; i < count; i++) {
9643 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9644 CAM_SCENE_MODE_OFF) {
9645 int val = lookupFwkName(SCENE_MODES_MAP,
9646 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9647 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009648
Thierry Strudel3d639192016-09-09 11:52:26 -07009649 if (NAME_NOT_FOUND != val) {
9650 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9651 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9652 supported_scene_modes_cnt++;
9653 }
9654 }
9655 }
9656 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9657 avail_scene_modes,
9658 supported_scene_modes_cnt);
9659
9660 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9661 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9662 supported_scene_modes_cnt,
9663 CAM_SCENE_MODE_MAX,
9664 scene_mode_overrides,
9665 supported_indexes,
9666 cameraId);
9667
9668 if (supported_scene_modes_cnt == 0) {
9669 supported_scene_modes_cnt = 1;
9670 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9671 }
9672
9673 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9674 scene_mode_overrides, supported_scene_modes_cnt * 3);
9675
9676 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9677 ANDROID_CONTROL_MODE_AUTO,
9678 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9679 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9680 available_control_modes,
9681 3);
9682
9683 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9684 size = 0;
9685 count = CAM_ANTIBANDING_MODE_MAX;
9686 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9687 for (size_t i = 0; i < count; i++) {
9688 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9689 gCamCapability[cameraId]->supported_antibandings[i]);
9690 if (NAME_NOT_FOUND != val) {
9691 avail_antibanding_modes[size] = (uint8_t)val;
9692 size++;
9693 }
9694
9695 }
9696 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9697 avail_antibanding_modes,
9698 size);
9699
9700 uint8_t avail_abberation_modes[] = {
9701 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9702 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9703 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9704 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9705 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9706 if (0 == count) {
9707 // If no aberration correction modes are available for a device, advertise only the OFF mode
9708 size = 1;
9709 } else {
9710 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9711 // So, advertise all 3 modes if at least one mode is supported, as per the
9712 // new M requirement
9713 size = 3;
9714 }
9715 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9716 avail_abberation_modes,
9717 size);
9718
9719 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9720 size = 0;
9721 count = CAM_FOCUS_MODE_MAX;
9722 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9723 for (size_t i = 0; i < count; i++) {
9724 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9725 gCamCapability[cameraId]->supported_focus_modes[i]);
9726 if (NAME_NOT_FOUND != val) {
9727 avail_af_modes[size] = (uint8_t)val;
9728 size++;
9729 }
9730 }
9731 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9732 avail_af_modes,
9733 size);
9734
9735 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9736 size = 0;
9737 count = CAM_WB_MODE_MAX;
9738 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9739 for (size_t i = 0; i < count; i++) {
9740 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9741 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9742 gCamCapability[cameraId]->supported_white_balances[i]);
9743 if (NAME_NOT_FOUND != val) {
9744 avail_awb_modes[size] = (uint8_t)val;
9745 size++;
9746 }
9747 }
9748 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9749 avail_awb_modes,
9750 size);
9751
9752 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9753 count = CAM_FLASH_FIRING_LEVEL_MAX;
9754 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9755 count);
9756 for (size_t i = 0; i < count; i++) {
9757 available_flash_levels[i] =
9758 gCamCapability[cameraId]->supported_firing_levels[i];
9759 }
9760 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9761 available_flash_levels, count);
9762
9763 uint8_t flashAvailable;
9764 if (gCamCapability[cameraId]->flash_available)
9765 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9766 else
9767 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9768 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9769 &flashAvailable, 1);
9770
9771 Vector<uint8_t> avail_ae_modes;
9772 count = CAM_AE_MODE_MAX;
9773 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9774 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009775 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9776 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9777 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9778 }
9779 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009780 }
9781 if (flashAvailable) {
9782 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9783 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9784 }
9785 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9786 avail_ae_modes.array(),
9787 avail_ae_modes.size());
9788
9789 int32_t sensitivity_range[2];
9790 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9791 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9792 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9793 sensitivity_range,
9794 sizeof(sensitivity_range) / sizeof(int32_t));
9795
9796 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9797 &gCamCapability[cameraId]->max_analog_sensitivity,
9798 1);
9799
9800 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9801 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9802 &sensor_orientation,
9803 1);
9804
9805 int32_t max_output_streams[] = {
9806 MAX_STALLING_STREAMS,
9807 MAX_PROCESSED_STREAMS,
9808 MAX_RAW_STREAMS};
9809 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9810 max_output_streams,
9811 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9812
9813 uint8_t avail_leds = 0;
9814 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9815 &avail_leds, 0);
9816
9817 uint8_t focus_dist_calibrated;
9818 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9819 gCamCapability[cameraId]->focus_dist_calibrated);
9820 if (NAME_NOT_FOUND != val) {
9821 focus_dist_calibrated = (uint8_t)val;
9822 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9823 &focus_dist_calibrated, 1);
9824 }
9825
9826 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9827 size = 0;
9828 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9829 MAX_TEST_PATTERN_CNT);
9830 for (size_t i = 0; i < count; i++) {
9831 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9832 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9833 if (NAME_NOT_FOUND != testpatternMode) {
9834 avail_testpattern_modes[size] = testpatternMode;
9835 size++;
9836 }
9837 }
9838 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9839 avail_testpattern_modes,
9840 size);
9841
9842 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9843 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9844 &max_pipeline_depth,
9845 1);
9846
9847 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9848 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9849 &partial_result_count,
9850 1);
9851
9852 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9853 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9854
9855 Vector<uint8_t> available_capabilities;
9856 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9857 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9858 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9859 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9860 if (supportBurst) {
9861 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9862 }
9863 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9864 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9865 if (hfrEnable && available_hfr_configs.array()) {
9866 available_capabilities.add(
9867 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9868 }
9869
9870 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9871 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9872 }
9873 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9874 available_capabilities.array(),
9875 available_capabilities.size());
9876
9877 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9878 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9879 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9880 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9881
9882 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9883 &aeLockAvailable, 1);
9884
9885 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9886 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9887 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9888 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9889
9890 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9891 &awbLockAvailable, 1);
9892
9893 int32_t max_input_streams = 1;
9894 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9895 &max_input_streams,
9896 1);
9897
9898 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9899 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9900 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9901 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9902 HAL_PIXEL_FORMAT_YCbCr_420_888};
9903 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9904 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
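    // Reading the map above: an IMPLEMENTATION_DEFINED input buffer can be
    // reprocessed into 2 output formats (BLOB or YCbCr_420_888), and a
    // YCbCr_420_888 input likewise into BLOB or YCbCr_420_888.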
9905
9906 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9907 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9908 &max_latency,
9909 1);
9910
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009911#ifndef USE_HAL_3_3
9912 int32_t isp_sensitivity_range[2];
9913 isp_sensitivity_range[0] =
9914 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9915 isp_sensitivity_range[1] =
9916 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9917 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9918 isp_sensitivity_range,
9919 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9920#endif
9921
Thierry Strudel3d639192016-09-09 11:52:26 -07009922 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9923 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9924 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9925 available_hot_pixel_modes,
9926 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9927
9928 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9929 ANDROID_SHADING_MODE_FAST,
9930 ANDROID_SHADING_MODE_HIGH_QUALITY};
9931 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9932 available_shading_modes,
9933 3);
9934
9935 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9936 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9937 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9938 available_lens_shading_map_modes,
9939 2);
9940
9941 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9942 ANDROID_EDGE_MODE_FAST,
9943 ANDROID_EDGE_MODE_HIGH_QUALITY,
9944 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9945 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9946 available_edge_modes,
9947 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9948
9949 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9950 ANDROID_NOISE_REDUCTION_MODE_FAST,
9951 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9952 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9953 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9954 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9955 available_noise_red_modes,
9956 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9957
9958 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9959 ANDROID_TONEMAP_MODE_FAST,
9960 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9961 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9962 available_tonemap_modes,
9963 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9964
9965 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9966 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9967 available_hot_pixel_map_modes,
9968 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9969
9970 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9971 gCamCapability[cameraId]->reference_illuminant1);
9972 if (NAME_NOT_FOUND != val) {
9973 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9974 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9975 }
9976
9977 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9978 gCamCapability[cameraId]->reference_illuminant2);
9979 if (NAME_NOT_FOUND != val) {
9980 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9981 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9982 }
9983
9984 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9985 (void *)gCamCapability[cameraId]->forward_matrix1,
9986 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9987
9988 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9989 (void *)gCamCapability[cameraId]->forward_matrix2,
9990 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9991
9992 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9993 (void *)gCamCapability[cameraId]->color_transform1,
9994 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9995
9996 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9997 (void *)gCamCapability[cameraId]->color_transform2,
9998 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9999
10000 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10001 (void *)gCamCapability[cameraId]->calibration_transform1,
10002 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10003
10004 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10005 (void *)gCamCapability[cameraId]->calibration_transform2,
10006 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10007
10008 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10009 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10010 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10011 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10012 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10013 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10014 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10015 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10016 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10017 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10018 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10019 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10020 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10021 ANDROID_JPEG_GPS_COORDINATES,
10022 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10023 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10024 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10025 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10026 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10027 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10028 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10029 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10030 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10031 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010032#ifndef USE_HAL_3_3
10033 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10034#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010035 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010036 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010037 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10038 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010039 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010040 /* DevCamDebug metadata request_keys_basic */
10041 DEVCAMDEBUG_META_ENABLE,
10042 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010043 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010044 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010045 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010046 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010047 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010048
10049 size_t request_keys_cnt =
10050 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10051 Vector<int32_t> available_request_keys;
10052 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10053 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10054 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10055 }
10056
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010057 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010058 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010059 }
10060
Thierry Strudel3d639192016-09-09 11:52:26 -070010061 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10062 available_request_keys.array(), available_request_keys.size());
10063
10064 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10065 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10066 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10067 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10068 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10069 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10070 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10071 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10072 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10073 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10074 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10075 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10076 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10077 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10078 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10079 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10080 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010081 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010082 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10083 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10084 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010085 ANDROID_STATISTICS_FACE_SCORES,
10086#ifndef USE_HAL_3_3
10087 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10088#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010089 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010090 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010091 // DevCamDebug metadata result_keys_basic
10092 DEVCAMDEBUG_META_ENABLE,
10093 // DevCamDebug metadata result_keys AF
10094 DEVCAMDEBUG_AF_LENS_POSITION,
10095 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10096 DEVCAMDEBUG_AF_TOF_DISTANCE,
10097 DEVCAMDEBUG_AF_LUMA,
10098 DEVCAMDEBUG_AF_HAF_STATE,
10099 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10100 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10101 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10102 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10103 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10104 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10105 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10106 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10107 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10108 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10109 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10110 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10111 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10112 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10113 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10114 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10115 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10116 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10117 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10118 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10119 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10120 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10121 // DevCamDebug metadata result_keys AEC
10122 DEVCAMDEBUG_AEC_TARGET_LUMA,
10123 DEVCAMDEBUG_AEC_COMP_LUMA,
10124 DEVCAMDEBUG_AEC_AVG_LUMA,
10125 DEVCAMDEBUG_AEC_CUR_LUMA,
10126 DEVCAMDEBUG_AEC_LINECOUNT,
10127 DEVCAMDEBUG_AEC_REAL_GAIN,
10128 DEVCAMDEBUG_AEC_EXP_INDEX,
10129 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010130 // DevCamDebug metadata result_keys zzHDR
10131 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10132 DEVCAMDEBUG_AEC_L_LINECOUNT,
10133 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10134 DEVCAMDEBUG_AEC_S_LINECOUNT,
10135 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10136 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10137 // DevCamDebug metadata result_keys ADRC
10138 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10139 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10140 DEVCAMDEBUG_AEC_GTM_RATIO,
10141 DEVCAMDEBUG_AEC_LTM_RATIO,
10142 DEVCAMDEBUG_AEC_LA_RATIO,
10143 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010144 // DevCamDebug metadata result_keys AWB
10145 DEVCAMDEBUG_AWB_R_GAIN,
10146 DEVCAMDEBUG_AWB_G_GAIN,
10147 DEVCAMDEBUG_AWB_B_GAIN,
10148 DEVCAMDEBUG_AWB_CCT,
10149 DEVCAMDEBUG_AWB_DECISION,
10150 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010151 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10152 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10153 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010154 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010155 };
10156
Thierry Strudel3d639192016-09-09 11:52:26 -070010157 size_t result_keys_cnt =
10158 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10159
10160 Vector<int32_t> available_result_keys;
10161 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10162 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10163 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10164 }
10165 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10166 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10167 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10168 }
10169 if (supportedFaceDetectMode == 1) {
10170 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10171 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10172 } else if ((supportedFaceDetectMode == 2) ||
10173 (supportedFaceDetectMode == 3)) {
10174 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10175 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10176 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010177#ifndef USE_HAL_3_3
10178 if (hasBlackRegions) {
10179 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10180 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10181 }
10182#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010183
10184 if (gExposeEnableZslKey) {
10185 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10186 }
10187
Thierry Strudel3d639192016-09-09 11:52:26 -070010188 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10189 available_result_keys.array(), available_result_keys.size());
10190
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010191 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010192 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10193 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10194 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10195 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10196 ANDROID_SCALER_CROPPING_TYPE,
10197 ANDROID_SYNC_MAX_LATENCY,
10198 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10199 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10200 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10201 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10202 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10203 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10204 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10205 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10206 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10207 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10208 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10209 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10210 ANDROID_LENS_FACING,
10211 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10212 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10213 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10214 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10215 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10216 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10217 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10218 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10219 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10220 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10221 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10222 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10223 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10224 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10225 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10226 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10227 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10228 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10229 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10230 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010231 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010232 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10233 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10234 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10235 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10236 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10237 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10238 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10239 ANDROID_CONTROL_AVAILABLE_MODES,
10240 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10241 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10242 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10243 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010244 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10245#ifndef USE_HAL_3_3
10246 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10247 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10248#endif
10249 };
10250
10251 Vector<int32_t> available_characteristics_keys;
10252 available_characteristics_keys.appendArray(characteristics_keys_basic,
10253 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10254#ifndef USE_HAL_3_3
10255 if (hasBlackRegions) {
10256 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10257 }
10258#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010259
10260 if (0 <= indexPD) {
10261 int32_t depthKeys[] = {
10262 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10263 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10264 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10265 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10266 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10267 };
10268 available_characteristics_keys.appendArray(depthKeys,
10269 sizeof(depthKeys) / sizeof(depthKeys[0]));
10270 }
10271
Thierry Strudel3d639192016-09-09 11:52:26 -070010272 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010273 available_characteristics_keys.array(),
10274 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010275
10276 /*available stall durations depend on the hw + sw and will be different for different devices */
10277 /*have to add for raw after implementation*/
10278 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10279 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10280
10281 Vector<int64_t> available_stall_durations;
10282 for (uint32_t j = 0; j < stall_formats_count; j++) {
10283 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10284 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10285 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10286 available_stall_durations.add(stall_formats[j]);
10287 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10288 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10289 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10290 }
10291 } else {
10292 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10293 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10294 available_stall_durations.add(stall_formats[j]);
10295 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10296 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10297 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10298 }
10299 }
10300 }
10301 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10302 available_stall_durations.array(),
10303 available_stall_durations.size());
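    // Each entry above is a (format, width, height, stall_ns) quadruple; for
    // instance, a hypothetical 4000x3000 JPEG size with a 300 ms stall would
    // appear as (HAL_PIXEL_FORMAT_BLOB, 4000, 3000, 300000000).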
10304
10305 //QCAMERA3_OPAQUE_RAW
10306 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10307 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10308 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10309 case LEGACY_RAW:
10310 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10311 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10312 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10313 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10314 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10315 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10316 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10317 break;
10318 case MIPI_RAW:
10319 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10320 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10321 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10322 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10323 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10324 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10325 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10326 break;
10327 default:
10328 LOGE("unknown opaque_raw_format %d",
10329 gCamCapability[cameraId]->opaque_raw_fmt);
10330 break;
10331 }
10332 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
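    // Example of the selection above: a sensor reporting MIPI_RAW packing with a
    // 10-bit white level ends up with fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG
    // and advertises QCAMERA3_OPAQUE_RAW_FORMAT_MIPI.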
10333
10334 Vector<int32_t> strides;
10335 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10336 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10337 cam_stream_buf_plane_info_t buf_planes;
10338 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10339 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10340 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10341 &gCamCapability[cameraId]->padding_info, &buf_planes);
10342 strides.add(buf_planes.plane_info.mp[0].stride);
10343 }
10344 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10345 strides.size());
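    // QCAMERA3_OPAQUE_RAW_STRIDES is therefore a list of (width, height, stride)
    // triplets, one per supported raw dimension, with the stride taken from the
    // plane info computed by mm_stream_calc_offset_raw() for the chosen format.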
10346
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010347 //TBD: remove the following line once backend advertises zzHDR in feature mask
10348 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010349 //Video HDR default
10350 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10351 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010352 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010353 int32_t vhdr_mode[] = {
10354 QCAMERA3_VIDEO_HDR_MODE_OFF,
10355 QCAMERA3_VIDEO_HDR_MODE_ON};
10356
10357 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10358 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10359 vhdr_mode, vhdr_mode_count);
10360 }
10361
Thierry Strudel3d639192016-09-09 11:52:26 -070010362 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10363 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10364 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10365
10366 uint8_t isMonoOnly =
10367 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10368 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10369 &isMonoOnly, 1);
10370
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010371#ifndef USE_HAL_3_3
10372 Vector<int32_t> opaque_size;
10373 for (size_t j = 0; j < scalar_formats_count; j++) {
10374 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10375 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10376 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10377 cam_stream_buf_plane_info_t buf_planes;
10378
10379 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10380 &gCamCapability[cameraId]->padding_info, &buf_planes);
10381
10382 if (rc == 0) {
10383 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10384 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10385 opaque_size.add(buf_planes.plane_info.frame_len);
10386 } else {
10387 LOGE("raw frame calculation failed!");
10388 }
10389 }
10390 }
10391 }
10392
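    // ANDROID_SENSOR_OPAQUE_RAW_SIZE holds (width, height, frame length in bytes)
    // triplets; the modulo check against PER_CONFIGURATION_SIZE_3 (presumably 3)
    // simply verifies that only complete triplets are published.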
10393 if ((opaque_size.size() > 0) &&
10394 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10395 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10396 else
10397 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using a rough estimation (2 bytes/pixel)");
10398#endif
10399
Thierry Strudel04e026f2016-10-10 11:27:36 -070010400 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10401 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10402 size = 0;
10403 count = CAM_IR_MODE_MAX;
10404 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10405 for (size_t i = 0; i < count; i++) {
10406 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10407 gCamCapability[cameraId]->supported_ir_modes[i]);
10408 if (NAME_NOT_FOUND != val) {
10409 avail_ir_modes[size] = (int32_t)val;
10410 size++;
10411 }
10412 }
10413 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10414 avail_ir_modes, size);
10415 }
10416
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010417 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10418 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10419 size = 0;
10420 count = CAM_AEC_CONVERGENCE_MAX;
10421 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10422 for (size_t i = 0; i < count; i++) {
10423 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10424 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10425 if (NAME_NOT_FOUND != val) {
10426 available_instant_aec_modes[size] = (int32_t)val;
10427 size++;
10428 }
10429 }
10430 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10431 available_instant_aec_modes, size);
10432 }
10433
Thierry Strudel54dc9782017-02-15 12:12:10 -080010434 int32_t sharpness_range[] = {
10435 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10436 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10437 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10438
10439 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10440 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10441 size = 0;
10442 count = CAM_BINNING_CORRECTION_MODE_MAX;
10443 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10444 for (size_t i = 0; i < count; i++) {
10445 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10446 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10447 gCamCapability[cameraId]->supported_binning_modes[i]);
10448 if (NAME_NOT_FOUND != val) {
10449 avail_binning_modes[size] = (int32_t)val;
10450 size++;
10451 }
10452 }
10453 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10454 avail_binning_modes, size);
10455 }
10456
10457 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10458 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10459 size = 0;
10460 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10461 for (size_t i = 0; i < count; i++) {
10462 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10463 gCamCapability[cameraId]->supported_aec_modes[i]);
10464 if (NAME_NOT_FOUND != val)
10465 available_aec_modes[size++] = val;
10466 }
10467 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10468 available_aec_modes, size);
10469 }
10470
10471 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10472 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10473 size = 0;
10474 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10475 for (size_t i = 0; i < count; i++) {
10476 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10477 gCamCapability[cameraId]->supported_iso_modes[i]);
10478 if (NAME_NOT_FOUND != val)
10479 available_iso_modes[size++] = val;
10480 }
10481 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10482 available_iso_modes, size);
10483 }
10484
10485 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010486 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010487 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10488 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10489 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10490
10491 int32_t available_saturation_range[4];
10492 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10493 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10494 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10495 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10496 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10497 available_saturation_range, 4);
10498
10499 uint8_t is_hdr_values[2];
10500 is_hdr_values[0] = 0;
10501 is_hdr_values[1] = 1;
10502 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10503 is_hdr_values, 2);
10504
10505 float is_hdr_confidence_range[2];
10506 is_hdr_confidence_range[0] = 0.0;
10507 is_hdr_confidence_range[1] = 1.0;
10508 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10509 is_hdr_confidence_range, 2);
10510
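    // Publish EEPROM version info, appending an Easel presence marker (",E:Y" / ",E:N").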
Emilian Peev0a972ef2017-03-16 10:25:53 +000010511 size_t eepromLength = strnlen(
10512 reinterpret_cast<const char *>(
10513 gCamCapability[cameraId]->eeprom_version_info),
10514 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10515 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010516 char easelInfo[] = ",E:N";
10517 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10518 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10519 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010520 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10521 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010522 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010523 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10524 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10525 }
10526
Thierry Strudel3d639192016-09-09 11:52:26 -070010527 gStaticMetadata[cameraId] = staticInfo.release();
10528 return rc;
10529}
10530
10531/*===========================================================================
10532 * FUNCTION : makeTable
10533 *
10534 * DESCRIPTION: flatten an array of dimensions into an interleaved width/height table
10535 *
10536 * PARAMETERS :
10537 *   @dimTable/@size      : source dimension array and its valid entry count
10538 *   @max_size/@sizeTable : max number of entries to copy and the output table
10539 *==========================================================================*/
10540void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10541 size_t max_size, int32_t *sizeTable)
10542{
10543 size_t j = 0;
10544 if (size > max_size) {
10545 size = max_size;
10546 }
10547 for (size_t i = 0; i < size; i++) {
10548 sizeTable[j] = dimTable[i].width;
10549 sizeTable[j+1] = dimTable[i].height;
10550 j+=2;
10551 }
10552}
10553
10554/*===========================================================================
10555 * FUNCTION : makeFPSTable
10556 *
10557 * DESCRIPTION: make a table of fps ranges
10558 *
10559 * PARAMETERS :
10560 *
10561 *==========================================================================*/
10562void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10563 size_t max_size, int32_t *fpsRangesTable)
10564{
10565 size_t j = 0;
10566 if (size > max_size) {
10567 size = max_size;
10568 }
10569 for (size_t i = 0; i < size; i++) {
10570 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10571 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10572 j+=2;
10573 }
10574}
10575
10576/*===========================================================================
10577 * FUNCTION : makeOverridesList
10578 *
10579 * DESCRIPTION: make a list of scene mode overrides
10580 *
10581 * PARAMETERS :
10582 *
10583 *
10584 *==========================================================================*/
10585void QCamera3HardwareInterface::makeOverridesList(
10586 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10587 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10588{
10589 /* The daemon gives a list of overrides for all scene modes.
10590 However, we should send the framework only the overrides for the
10591 scene modes it supports. */
10592 size_t j = 0;
10593 if (size > max_size) {
10594 size = max_size;
10595 }
10596 size_t focus_count = CAM_FOCUS_MODE_MAX;
10597 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10598 focus_count);
10599 for (size_t i = 0; i < size; i++) {
10600 bool supt = false;
10601 size_t index = supported_indexes[i];
10602 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10603 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10604 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10605 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10606 overridesTable[index].awb_mode);
10607 if (NAME_NOT_FOUND != val) {
10608 overridesList[j+1] = (uint8_t)val;
10609 }
10610 uint8_t focus_override = overridesTable[index].af_mode;
10611 for (size_t k = 0; k < focus_count; k++) {
10612 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10613 supt = true;
10614 break;
10615 }
10616 }
10617 if (supt) {
10618 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10619 focus_override);
10620 if (NAME_NOT_FOUND != val) {
10621 overridesList[j+2] = (uint8_t)val;
10622 }
10623 } else {
10624 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10625 }
10626 j+=3;
10627 }
10628}
10629
10630/*===========================================================================
10631 * FUNCTION : filterJpegSizes
10632 *
10633 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that are
10634 * no smaller than the active array dimensions divided by the max downscale factor
10635 *
10636 * PARAMETERS :
10637 *
10638 * RETURN : length of jpegSizes array
10639 *==========================================================================*/
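// Example of the filtering rule above (hypothetical numbers): with a 4000x3000
// active array and downscale_factor 4, only processed sizes of at least
// 1000x750 survive as valid JPEG sizes.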
10640
10641size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10642 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10643 uint8_t downscale_factor)
10644{
10645 if (0 == downscale_factor) {
10646 downscale_factor = 1;
10647 }
10648
10649 int32_t min_width = active_array_size.width / downscale_factor;
10650 int32_t min_height = active_array_size.height / downscale_factor;
10651 size_t jpegSizesCnt = 0;
10652 if (processedSizesCnt > maxCount) {
10653 processedSizesCnt = maxCount;
10654 }
10655 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10656 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10657 jpegSizes[jpegSizesCnt] = processedSizes[i];
10658 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10659 jpegSizesCnt += 2;
10660 }
10661 }
10662 return jpegSizesCnt;
10663}
10664
10665/*===========================================================================
10666 * FUNCTION : computeNoiseModelEntryS
10667 *
10668 * DESCRIPTION: function to map a given sensitivity to the S noise
10669 * model parameters in the DNG noise model.
10670 *
10671 * PARAMETERS : sens : the sensor sensitivity
10672 *
10673 * RETURN : S (sensor amplification) noise
10674 *
10675 *==========================================================================*/
10676double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10677 double s = gCamCapability[mCameraId]->gradient_S * sens +
10678 gCamCapability[mCameraId]->offset_S;
10679 return ((s < 0.0) ? 0.0 : s);
10680}
10681
10682/*===========================================================================
10683 * FUNCTION : computeNoiseModelEntryO
10684 *
10685 * DESCRIPTION: function to map a given sensitivity to the O noise
10686 * model parameters in the DNG noise model.
10687 *
10688 * PARAMETERS : sens : the sensor sensitivity
10689 *
10690 * RETURN : O (sensor readout) noise
10691 *
10692 *==========================================================================*/
10693double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10694 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10695 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10696 1.0 : (1.0 * sens / max_analog_sens);
10697 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10698 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10699 return ((o < 0.0) ? 0.0 : o);
10700}
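
// Together these provide the (S, O) pairs of the DNG noise model, where the noise
// variance at a normalized signal level x in [0, 1] is approximately S * x + O
// (the form expected by ANDROID_SENSOR_NOISE_PROFILE). Worked example with
// hypothetical calibration data: gradient_S = 3.7e-06, offset_S = 4.0e-06 and
// sens = 400 give S = 3.7e-06 * 400 + 4.0e-06 = 1.484e-03.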
10701
10702/*===========================================================================
10703 * FUNCTION : getSensorSensitivity
10704 *
10705 * DESCRIPTION: convert iso_mode to an integer value
10706 *
10707 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10708 *
10709 * RETURN : sensitivity supported by sensor
10710 *
10711 *==========================================================================*/
10712int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10713{
10714 int32_t sensitivity;
10715
10716 switch (iso_mode) {
10717 case CAM_ISO_MODE_100:
10718 sensitivity = 100;
10719 break;
10720 case CAM_ISO_MODE_200:
10721 sensitivity = 200;
10722 break;
10723 case CAM_ISO_MODE_400:
10724 sensitivity = 400;
10725 break;
10726 case CAM_ISO_MODE_800:
10727 sensitivity = 800;
10728 break;
10729 case CAM_ISO_MODE_1600:
10730 sensitivity = 1600;
10731 break;
10732 default:
10733 sensitivity = -1;
10734 break;
10735 }
10736 return sensitivity;
10737}
10738
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010739int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010740 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010741 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10742 // to connect to Easel.
10743 bool doNotpowerOnEasel =
10744 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10745
10746 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010747 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10748 return OK;
10749 }
10750
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010751 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010752 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010753 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010754 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010755 return res;
10756 }
10757
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010758 EaselManagerClientOpened = true;
10759
10760 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010761 if (res != OK) {
10762 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10763 }
10764
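        // Easel runs in bypass-only mode unless HDR+ is explicitly enabled via the
        // persist.camera.hdrplus.enable property.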
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010765 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010766 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010767
10768 // Expose enableZsl key only when HDR+ mode is enabled.
10769 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010770 }
10771
10772 return OK;
10773}
10774
Thierry Strudel3d639192016-09-09 11:52:26 -070010775/*===========================================================================
10776 * FUNCTION : getCamInfo
10777 *
10778 * DESCRIPTION: query camera capabilities
10779 *
10780 * PARAMETERS :
10781 * @cameraId : camera Id
10782 * @info : camera info struct to be filled in with camera capabilities
10783 *
10784 * RETURN : int type of status
10785 * NO_ERROR -- success
10786 * none-zero failure code
10787 *==========================================================================*/
10788int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10789 struct camera_info *info)
10790{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010791 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010792 int rc = 0;
10793
10794 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010795
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010796 {
10797 Mutex::Autolock l(gHdrPlusClientLock);
10798 rc = initHdrPlusClientLocked();
10799 if (rc != OK) {
10800 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10801 pthread_mutex_unlock(&gCamLock);
10802 return rc;
10803 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010804 }
10805
Thierry Strudel3d639192016-09-09 11:52:26 -070010806 if (NULL == gCamCapability[cameraId]) {
10807 rc = initCapabilities(cameraId);
10808 if (rc < 0) {
10809 pthread_mutex_unlock(&gCamLock);
10810 return rc;
10811 }
10812 }
10813
10814 if (NULL == gStaticMetadata[cameraId]) {
10815 rc = initStaticMetadata(cameraId);
10816 if (rc < 0) {
10817 pthread_mutex_unlock(&gCamLock);
10818 return rc;
10819 }
10820 }
10821
10822 switch(gCamCapability[cameraId]->position) {
10823 case CAM_POSITION_BACK:
10824 case CAM_POSITION_BACK_AUX:
10825 info->facing = CAMERA_FACING_BACK;
10826 break;
10827
10828 case CAM_POSITION_FRONT:
10829 case CAM_POSITION_FRONT_AUX:
10830 info->facing = CAMERA_FACING_FRONT;
10831 break;
10832
10833 default:
10834 LOGE("Unknown position type %d for camera id:%d",
10835 gCamCapability[cameraId]->position, cameraId);
10836 rc = -1;
10837 break;
10838 }
10839
10840
10841 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010842#ifndef USE_HAL_3_3
10843 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10844#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010845 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010846#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010847 info->static_camera_characteristics = gStaticMetadata[cameraId];
10848
10849 //For now assume both cameras can operate independently.
10850 info->conflicting_devices = NULL;
10851 info->conflicting_devices_length = 0;
10852
10853 //resource cost is 100 * MIN(1.0, m/M),
10854 //where m is throughput requirement with maximum stream configuration
10855 //and M is CPP maximum throughput.
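    //e.g. (hypothetical numbers): 3 processed streams of a 12MP active array at
    //30fps against a 2000MP/s CPP throughput give m/M = 3 * 12e6 * 30 / 2e9 = 0.54,
    //i.e. a resource cost of 54.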
10856 float max_fps = 0.0;
10857 for (uint32_t i = 0;
10858 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10859 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10860 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10861 }
10862 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10863 gCamCapability[cameraId]->active_array_size.width *
10864 gCamCapability[cameraId]->active_array_size.height * max_fps /
10865 gCamCapability[cameraId]->max_pixel_bandwidth;
10866 info->resource_cost = 100 * MIN(1.0, ratio);
10867 LOGI("camera %d resource cost is %d", cameraId,
10868 info->resource_cost);
10869
10870 pthread_mutex_unlock(&gCamLock);
10871 return rc;
10872}
10873
10874/*===========================================================================
10875 * FUNCTION : translateCapabilityToMetadata
10876 *
10877 * DESCRIPTION: translate the capability into camera_metadata_t
10878 *
10879 * PARAMETERS : type of the request
10880 *
10881 *
10882 * RETURN : success: camera_metadata_t*
10883 * failure: NULL
10884 *
10885 *==========================================================================*/
10886camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10887{
10888 if (mDefaultMetadata[type] != NULL) {
10889 return mDefaultMetadata[type];
10890 }
10891 //first time we are handling this request
10892 //fill up the metadata structure using the wrapper class
10893 CameraMetadata settings;
10894 //translate from cam_capability_t to camera_metadata_tag_t
10895 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10896 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10897 int32_t defaultRequestID = 0;
10898 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10899
10900 /* OIS disable */
10901 char ois_prop[PROPERTY_VALUE_MAX];
10902 memset(ois_prop, 0, sizeof(ois_prop));
10903 property_get("persist.camera.ois.disable", ois_prop, "0");
10904 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10905
10906 /* Force video to use OIS */
10907 char videoOisProp[PROPERTY_VALUE_MAX];
10908 memset(videoOisProp, 0, sizeof(videoOisProp));
10909 property_get("persist.camera.ois.video", videoOisProp, "1");
10910 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010911
10912 // Hybrid AE enable/disable
10913 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10914 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10915 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10916 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10917
Thierry Strudel3d639192016-09-09 11:52:26 -070010918 uint8_t controlIntent = 0;
10919 uint8_t focusMode;
10920 uint8_t vsMode;
10921 uint8_t optStabMode;
10922 uint8_t cacMode;
10923 uint8_t edge_mode;
10924 uint8_t noise_red_mode;
10925 uint8_t tonemap_mode;
10926 bool highQualityModeEntryAvailable = FALSE;
10927 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010928 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010929 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10930 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010931 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010932 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010933 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010934
Thierry Strudel3d639192016-09-09 11:52:26 -070010935 switch (type) {
10936 case CAMERA3_TEMPLATE_PREVIEW:
10937 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10938 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10939 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10940 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10941 edge_mode = ANDROID_EDGE_MODE_FAST;
10942 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10943 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10944 break;
10945 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10946 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10947 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10948 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10949 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10950 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10951 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10952 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10953 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10954 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10955 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10956 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10957 highQualityModeEntryAvailable = TRUE;
10958 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10959 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10960 fastModeEntryAvailable = TRUE;
10961 }
10962 }
10963 if (highQualityModeEntryAvailable) {
10964 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10965 } else if (fastModeEntryAvailable) {
10966 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10967 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010968 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10969 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10970 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010971 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010972 break;
10973 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10974 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10975 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10976 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010977 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10978 edge_mode = ANDROID_EDGE_MODE_FAST;
10979 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10980 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10981 if (forceVideoOis)
10982 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10983 break;
10984 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10985 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10986 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10987 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010988 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10989 edge_mode = ANDROID_EDGE_MODE_FAST;
10990 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10991 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10992 if (forceVideoOis)
10993 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10994 break;
10995 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10996 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10997 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10998 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10999 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11000 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11001 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11002 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11003 break;
11004 case CAMERA3_TEMPLATE_MANUAL:
11005 edge_mode = ANDROID_EDGE_MODE_FAST;
11006 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11007 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11008 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11009 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11010 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11011 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11012 break;
11013 default:
11014 edge_mode = ANDROID_EDGE_MODE_FAST;
11015 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11016 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11017 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11018 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11019 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11020 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11021 break;
11022 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011023 // Set CAC to OFF if the underlying device doesn't report any aberration modes
11024 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11025 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11026 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011027 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11028 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11029 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11030 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11031 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11032 }
11033 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011034 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011035 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011036
11037 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11038 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11039 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11040 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11041 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11042 || ois_disable)
11043 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11044 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011045 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011046
11047 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11048 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11049
11050 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11051 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11052
11053 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11054 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11055
11056 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11057 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11058
11059 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11060 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11061
11062 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11063 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11064
11065 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11066 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11067
11068 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11069 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11070
11071 /*flash*/
11072 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11073 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11074
11075 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11076 settings.update(ANDROID_FLASH_FIRING_POWER,
11077 &flashFiringLevel, 1);
11078
11079 /* lens */
11080 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11081 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11082
11083 if (gCamCapability[mCameraId]->filter_densities_count) {
11084 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11085 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11086 gCamCapability[mCameraId]->filter_densities_count);
11087 }
11088
11089 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11090 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11091
Thierry Strudel3d639192016-09-09 11:52:26 -070011092 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11093 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11094
11095 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11096 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11097
11098 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11099 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11100
11101 /* face detection (default to OFF) */
11102 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11103 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11104
Thierry Strudel54dc9782017-02-15 12:12:10 -080011105 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11106 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011107
11108 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11109 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11110
11111 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11112 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11113
Thierry Strudel3d639192016-09-09 11:52:26 -070011114
11115 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11116 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11117
11118 /* Exposure time (default to the minimum supported exposure time) */
11119 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11120 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11121
11122 /* frame duration */
11123 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11124 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11125
11126 /* sensitivity */
11127 static const int32_t default_sensitivity = 100;
11128 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011129#ifndef USE_HAL_3_3
11130 static const int32_t default_isp_sensitivity =
11131 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11132 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11133#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011134
11135 /*edge mode*/
11136 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11137
11138 /*noise reduction mode*/
11139 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11140
11141 /*color correction mode*/
11142 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11143 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11144
11145 /*transform matrix mode*/
11146 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11147
11148 int32_t scaler_crop_region[4];
11149 scaler_crop_region[0] = 0;
11150 scaler_crop_region[1] = 0;
11151 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11152 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11153 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11154
11155 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11156 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11157
11158 /*focus distance*/
11159 float focus_distance = 0.0;
11160 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11161
11162 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011163 /* Restrict template max_fps to 30 */
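    /* e.g. (hypothetical fps table) {[15,30], [30,30], [30,60]} with a 30fps template cap:
     * [30,60] is skipped; preview/still/ZSL templates pick [15,30] (widest range) and
     * the remaining templates pick [30,30] (highest fixed range). */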
Thierry Strudel3d639192016-09-09 11:52:26 -070011164 float max_range = 0.0;
11165 float max_fixed_fps = 0.0;
11166 int32_t fps_range[2] = {0, 0};
11167 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11168 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011169 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11170 TEMPLATE_MAX_PREVIEW_FPS) {
11171 continue;
11172 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011173 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11174 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11175 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11176 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11177 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11178 if (range > max_range) {
11179 fps_range[0] =
11180 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11181 fps_range[1] =
11182 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11183 max_range = range;
11184 }
11185 } else {
11186 if (range < 0.01 && max_fixed_fps <
11187 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11188 fps_range[0] =
11189 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11190 fps_range[1] =
11191 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11192 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11193 }
11194 }
11195 }
11196 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11197
11198 /*precapture trigger*/
11199 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11200 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11201
11202 /*af trigger*/
11203 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11204 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11205
11206 /* ae & af regions */
11207 int32_t active_region[] = {
11208 gCamCapability[mCameraId]->active_array_size.left,
11209 gCamCapability[mCameraId]->active_array_size.top,
11210 gCamCapability[mCameraId]->active_array_size.left +
11211 gCamCapability[mCameraId]->active_array_size.width,
11212 gCamCapability[mCameraId]->active_array_size.top +
11213 gCamCapability[mCameraId]->active_array_size.height,
11214 0};
11215 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11216 sizeof(active_region) / sizeof(active_region[0]));
11217 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11218 sizeof(active_region) / sizeof(active_region[0]));
11219
11220 /* black level lock */
11221 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11222 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11223
Thierry Strudel3d639192016-09-09 11:52:26 -070011224 //special defaults for manual template
11225 if (type == CAMERA3_TEMPLATE_MANUAL) {
11226 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11227 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11228
11229 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11230 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11231
11232 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11233 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11234
11235 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11236 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11237
11238 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11239 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11240
11241 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11242 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11243 }
11244
11245
11246 /* TNR
11247 * We use this location to determine for which templates TNR will be set.
11248 * TNR is enabled if either the preview or the video stream requires it.
11249 * This is not to be confused with per-stream linking; that decision is still
11250 * made on a per-session basis and is handled as part of stream configuration.
11251 */
11252 uint8_t tnr_enable = 0;
11253
11254 if (m_bTnrPreview || m_bTnrVideo) {
11255
11256 switch (type) {
11257 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11258 tnr_enable = 1;
11259 break;
11260
11261 default:
11262 tnr_enable = 0;
11263 break;
11264 }
11265
11266 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11267 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11268 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11269
11270 LOGD("TNR:%d with process plate %d for template:%d",
11271 tnr_enable, tnr_process_type, type);
11272 }
11273
11274 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011275 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011276 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11277
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011278 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011279 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11280
Shuzhen Wang920ea402017-05-03 08:49:39 -070011281 uint8_t related_camera_id = mCameraId;
11282 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011283
11284 /* CDS default */
11285 char prop[PROPERTY_VALUE_MAX];
11286 memset(prop, 0, sizeof(prop));
11287 property_get("persist.camera.CDS", prop, "Auto");
11288 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11289 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11290 if (CAM_CDS_MODE_MAX == cds_mode) {
11291 cds_mode = CAM_CDS_MODE_AUTO;
11292 }
11293
11294 /* Disabling CDS in templates which have TNR enabled */
11295 if (tnr_enable)
11296 cds_mode = CAM_CDS_MODE_OFF;
11297
11298 int32_t mode = cds_mode;
11299 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011300
Thierry Strudel269c81a2016-10-12 12:13:59 -070011301 /* Manual Convergence AEC Speed is disabled by default*/
11302 float default_aec_speed = 0;
11303 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11304
11305 /* Manual Convergence AWB Speed is disabled by default*/
11306 float default_awb_speed = 0;
11307 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11308
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011309 // Set instant AEC to normal convergence by default
11310 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11311 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11312
Shuzhen Wang19463d72016-03-08 11:09:52 -080011313 /* hybrid ae */
11314 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11315
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011316 if (gExposeEnableZslKey) {
11317 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11318 }
11319
Thierry Strudel3d639192016-09-09 11:52:26 -070011320 mDefaultMetadata[type] = settings.release();
11321
11322 return mDefaultMetadata[type];
11323}
11324
11325/*===========================================================================
11326 * FUNCTION : setFrameParameters
11327 *
11328 * DESCRIPTION: set parameters per frame as requested in the metadata from
11329 * framework
11330 *
11331 * PARAMETERS :
11332 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011333 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011334 * @blob_request: Whether this request is a blob request or not
11335 *
11336 * RETURN : success: NO_ERROR
11337 * failure:
11338 *==========================================================================*/
11339int QCamera3HardwareInterface::setFrameParameters(
11340 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011341 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011342 int blob_request,
11343 uint32_t snapshotStreamId)
11344{
11345 /*translate from camera_metadata_t type to parm_type_t*/
11346 int rc = 0;
11347 int32_t hal_version = CAM_HAL_V3;
11348
11349 clear_metadata_buffer(mParameters);
11350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11351 LOGE("Failed to set hal version in the parameters");
11352 return BAD_VALUE;
11353 }
11354
11355 /*we need to update the frame number in the parameters*/
11356 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11357 request->frame_number)) {
11358 LOGE("Failed to set the frame number in the parameters");
11359 return BAD_VALUE;
11360 }
11361
11362 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011364 LOGE("Failed to set stream type mask in the parameters");
11365 return BAD_VALUE;
11366 }
11367
11368 if (mUpdateDebugLevel) {
11369 uint32_t dummyDebugLevel = 0;
11370 /* The value of dummyDebugLevel is irrelevant. On
11371 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11373 dummyDebugLevel)) {
11374 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11375 return BAD_VALUE;
11376 }
11377 mUpdateDebugLevel = false;
11378 }
11379
11380 if(request->settings != NULL){
11381 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11382 if (blob_request)
11383 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11384 }
11385
11386 return rc;
11387}
11388
11389/*===========================================================================
11390 * FUNCTION : setReprocParameters
11391 *
11392 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11393 * return it.
11394 *
11395 * PARAMETERS :
11396 * @request : request that needs to be serviced
11397 *
11398 * RETURN : success: NO_ERROR
11399 * failure:
11400 *==========================================================================*/
11401int32_t QCamera3HardwareInterface::setReprocParameters(
11402 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11403 uint32_t snapshotStreamId)
11404{
11405 /*translate from camera_metadata_t type to parm_type_t*/
11406 int rc = 0;
11407
11408 if (NULL == request->settings){
11409 LOGE("Reprocess settings cannot be NULL");
11410 return BAD_VALUE;
11411 }
11412
11413 if (NULL == reprocParam) {
11414 LOGE("Invalid reprocessing metadata buffer");
11415 return BAD_VALUE;
11416 }
11417 clear_metadata_buffer(reprocParam);
11418
11419 /*we need to update the frame number in the parameters*/
11420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11421 request->frame_number)) {
11422 LOGE("Failed to set the frame number in the parameters");
11423 return BAD_VALUE;
11424 }
11425
11426 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11427 if (rc < 0) {
11428 LOGE("Failed to translate reproc request");
11429 return rc;
11430 }
11431
11432 CameraMetadata frame_settings;
11433 frame_settings = request->settings;
11434 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11435 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11436 int32_t *crop_count =
11437 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11438 int32_t *crop_data =
11439 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11440 int32_t *roi_map =
11441 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11442 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11443 cam_crop_data_t crop_meta;
11444 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11445 crop_meta.num_of_streams = 1;
11446 crop_meta.crop_info[0].crop.left = crop_data[0];
11447 crop_meta.crop_info[0].crop.top = crop_data[1];
11448 crop_meta.crop_info[0].crop.width = crop_data[2];
11449 crop_meta.crop_info[0].crop.height = crop_data[3];
11450
11451 crop_meta.crop_info[0].roi_map.left =
11452 roi_map[0];
11453 crop_meta.crop_info[0].roi_map.top =
11454 roi_map[1];
11455 crop_meta.crop_info[0].roi_map.width =
11456 roi_map[2];
11457 crop_meta.crop_info[0].roi_map.height =
11458 roi_map[3];
11459
11460 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11461 rc = BAD_VALUE;
11462 }
11463 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11464 request->input_buffer->stream,
11465 crop_meta.crop_info[0].crop.left,
11466 crop_meta.crop_info[0].crop.top,
11467 crop_meta.crop_info[0].crop.width,
11468 crop_meta.crop_info[0].crop.height);
11469 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11470 request->input_buffer->stream,
11471 crop_meta.crop_info[0].roi_map.left,
11472 crop_meta.crop_info[0].roi_map.top,
11473 crop_meta.crop_info[0].roi_map.width,
11474 crop_meta.crop_info[0].roi_map.height);
11475 } else {
11476 LOGE("Invalid reprocess crop count %d!", *crop_count);
11477 }
11478 } else {
11479 LOGE("No crop data from matching output stream");
11480 }
11481
11482 /* These settings are not needed for regular requests so handle them specially for
11483 reprocess requests; information needed for EXIF tags */
11484 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11485 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11486 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11487 if (NAME_NOT_FOUND != val) {
11488 uint32_t flashMode = (uint32_t)val;
11489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11490 rc = BAD_VALUE;
11491 }
11492 } else {
11493 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11494 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11495 }
11496 } else {
11497 LOGH("No flash mode in reprocess settings");
11498 }
11499
11500 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11501 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11503 rc = BAD_VALUE;
11504 }
11505 } else {
11506 LOGH("No flash state in reprocess settings");
11507 }
11508
11509 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11510 uint8_t *reprocessFlags =
11511 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11512 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11513 *reprocessFlags)) {
11514 rc = BAD_VALUE;
11515 }
11516 }
11517
Thierry Strudel54dc9782017-02-15 12:12:10 -080011518 // Add exif debug data to internal metadata
11519 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11520 mm_jpeg_debug_exif_params_t *debug_params =
11521 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11522 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11523 // AE
11524 if (debug_params->ae_debug_params_valid == TRUE) {
11525 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11526 debug_params->ae_debug_params);
11527 }
11528 // AWB
11529 if (debug_params->awb_debug_params_valid == TRUE) {
11530 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11531 debug_params->awb_debug_params);
11532 }
11533 // AF
11534 if (debug_params->af_debug_params_valid == TRUE) {
11535 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11536 debug_params->af_debug_params);
11537 }
11538 // ASD
11539 if (debug_params->asd_debug_params_valid == TRUE) {
11540 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11541 debug_params->asd_debug_params);
11542 }
11543 // Stats
11544 if (debug_params->stats_debug_params_valid == TRUE) {
11545 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11546 debug_params->stats_debug_params);
11547 }
11548 // BE Stats
11549 if (debug_params->bestats_debug_params_valid == TRUE) {
11550 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11551 debug_params->bestats_debug_params);
11552 }
11553 // BHIST
11554 if (debug_params->bhist_debug_params_valid == TRUE) {
11555 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11556 debug_params->bhist_debug_params);
11557 }
11558 // 3A Tuning
11559 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11560 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11561 debug_params->q3a_tuning_debug_params);
11562 }
11563 }
11564
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011565 // Add metadata which reprocess needs
11566 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11567 cam_reprocess_info_t *repro_info =
11568 (cam_reprocess_info_t *)frame_settings.find
11569 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011570 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011571 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011572 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011573 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011574 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011575 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011576 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011577 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011578 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011579 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011580 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011581 repro_info->pipeline_flip);
11582 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11583 repro_info->af_roi);
11584 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11585 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011586 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11587 CAM_INTF_PARM_ROTATION metadata has already been added in
11588 translateToHalMetadata and HAL needs to keep this new rotation
11589 metadata. Otherwise, the old rotation info saved in the vendor tag
11590 is used */
11591 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11592 CAM_INTF_PARM_ROTATION, reprocParam) {
11593 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11594 } else {
11595 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011596 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011597 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011598 }
11599
11600 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11601 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11602 roi.width and roi.height give the final JPEG size.
11603 For now, HAL only checks this for reprocess requests */
11604 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11605 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11606 uint8_t *enable =
11607 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11608 if (*enable == TRUE) {
11609 int32_t *crop_data =
11610 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11611 cam_stream_crop_info_t crop_meta;
11612 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11613 crop_meta.stream_id = 0;
11614 crop_meta.crop.left = crop_data[0];
11615 crop_meta.crop.top = crop_data[1];
11616 crop_meta.crop.width = crop_data[2];
11617 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011618 // The JPEG crop roi should match cpp output size
11619 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11620 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11621 crop_meta.roi_map.left = 0;
11622 crop_meta.roi_map.top = 0;
11623 crop_meta.roi_map.width = cpp_crop->crop.width;
11624 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011625 }
11626 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11627 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011628 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011629 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011630 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11631 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011632 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011633 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11634
11635 // Add JPEG scale information
11636 cam_dimension_t scale_dim;
11637 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11638 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11639 int32_t *roi =
11640 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11641 scale_dim.width = roi[2];
11642 scale_dim.height = roi[3];
11643 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11644 scale_dim);
11645 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11646 scale_dim.width, scale_dim.height, mCameraId);
11647 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011648 }
11649 }
11650
11651 return rc;
11652}
11653
11654/*===========================================================================
11655 * FUNCTION : saveRequestSettings
11656 *
11657 * DESCRIPTION: Add any settings that might have changed to the request settings
11658 * and save the settings to be applied on the frame
11659 *
11660 * PARAMETERS :
11661 * @jpegMetadata : the extracted and/or modified jpeg metadata
11662 * @request : request with initial settings
11663 *
11664 * RETURN :
11665 * camera_metadata_t* : pointer to the saved request settings
11666 *==========================================================================*/
11667camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11668 const CameraMetadata &jpegMetadata,
11669 camera3_capture_request_t *request)
11670{
11671 camera_metadata_t *resultMetadata;
11672 CameraMetadata camMetadata;
11673 camMetadata = request->settings;
11674
11675 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11676 int32_t thumbnail_size[2];
11677 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11678 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11679 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11680 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11681 }
11682
11683 if (request->input_buffer != NULL) {
11684 uint8_t reprocessFlags = 1;
11685 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11686 (uint8_t*)&reprocessFlags,
11687 sizeof(reprocessFlags));
11688 }
11689
11690 resultMetadata = camMetadata.release();
11691 return resultMetadata;
11692}
11693
11694/*===========================================================================
11695 * FUNCTION : setHalFpsRange
11696 *
11697 * DESCRIPTION: set FPS range parameter
11698 *
11699 *
11700 * PARAMETERS :
11701 * @settings : Metadata from framework
11702 * @hal_metadata: Metadata buffer
11703 *
11704 *
11705 * RETURN : success: NO_ERROR
11706 * failure:
11707 *==========================================================================*/
11708int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11709 metadata_buffer_t *hal_metadata)
11710{
11711 int32_t rc = NO_ERROR;
11712 cam_fps_range_t fps_range;
11713 fps_range.min_fps = (float)
11714 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11715 fps_range.max_fps = (float)
11716 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11717 fps_range.video_min_fps = fps_range.min_fps;
11718 fps_range.video_max_fps = fps_range.max_fps;
11719
11720 LOGD("aeTargetFpsRange fps: [%f %f]",
11721 fps_range.min_fps, fps_range.max_fps);
11722 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11723 * follows:
11724 * ---------------------------------------------------------------|
11725 * Video stream is absent in configure_streams |
11726 * (Camcorder preview before the first video record) |
11727 * ---------------------------------------------------------------|
11728 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11729 * | | | vid_min/max_fps|
11730 * ---------------------------------------------------------------|
11731 * NO | [ 30, 240] | 240 | [240, 240] |
11732 * |-------------|-------------|----------------|
11733 * | [240, 240] | 240 | [240, 240] |
11734 * ---------------------------------------------------------------|
11735 * Video stream is present in configure_streams |
11736 * ---------------------------------------------------------------|
11737 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11738 * | | | vid_min/max_fps|
11739 * ---------------------------------------------------------------|
11740 * NO | [ 30, 240] | 240 | [240, 240] |
11741 * (camcorder prev |-------------|-------------|----------------|
11742 * after video rec | [240, 240] | 240 | [240, 240] |
11743 * is stopped) | | | |
11744 * ---------------------------------------------------------------|
11745 * YES | [ 30, 240] | 240 | [240, 240] |
11746 * |-------------|-------------|----------------|
11747 * | [240, 240] | 240 | [240, 240] |
11748 * ---------------------------------------------------------------|
11749 * When Video stream is absent in configure_streams,
11750 * preview fps = sensor_fps / batchsize
11751 * Eg: for 240fps at batchSize 4, preview = 60fps
11752 * for 120fps at batchSize 4, preview = 30fps
11753 *
11754 * When video stream is present in configure_streams, preview fps is as per
11755 * the ratio of preview buffers to video buffers requested in process
11756 * capture request
11757 */
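    /* Worked example (illustrative; PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE are
     * HAL-defined constants): in CONSTRAINED_HIGH_SPEED mode with aeTargetFpsRange
     * [240, 240], the code below selects the 240fps HFR mode and computes
     * mBatchSize = 240 / PREVIEW_FPS_FOR_HFR, clamped to MAX_HFR_BATCH_SIZE; outside
     * that mode, HFR is explicitly reset to CAM_HFR_MODE_OFF. */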
11758 mBatchSize = 0;
11759 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11760 fps_range.min_fps = fps_range.video_max_fps;
11761 fps_range.video_min_fps = fps_range.video_max_fps;
11762 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11763 fps_range.max_fps);
11764 if (NAME_NOT_FOUND != val) {
11765 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11766 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11767 return BAD_VALUE;
11768 }
11769
11770 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11771 /* If batchmode is currently in progress and the fps changes,
11772 * set the flag to restart the sensor */
11773 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11774 (mHFRVideoFps != fps_range.max_fps)) {
11775 mNeedSensorRestart = true;
11776 }
11777 mHFRVideoFps = fps_range.max_fps;
11778 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11779 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11780 mBatchSize = MAX_HFR_BATCH_SIZE;
11781 }
11782 }
11783 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11784
11785 }
11786 } else {
11787 /* HFR mode is session param in backend/ISP. This should be reset when
11788 * in non-HFR mode */
11789 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11791 return BAD_VALUE;
11792 }
11793 }
11794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11795 return BAD_VALUE;
11796 }
11797 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11798 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11799 return rc;
11800}
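
/* Illustrative call pattern (hedged sketch): setHalFpsRange() is only reached from
 * translateFwkMetadataToHalMetadata() below when the request carries an AE target
 * FPS range, e.g.
 *
 *   if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
 *       rc = setHalFpsRange(frame_settings, hal_metadata);
 *   }
 *
 * so requests without that key leave CAM_INTF_PARM_FPS_RANGE untouched for the frame.
 */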
11801
11802/*===========================================================================
11803 * FUNCTION : translateToHalMetadata
11804 *
11805 * DESCRIPTION: translate framework camera_metadata_t settings into HAL metadata_buffer_t entries
11806 *
11807 *
11808 * PARAMETERS :
11809 * @request : request sent from framework
11810 *
11811 *
11812 * RETURN : success: NO_ERROR
11813 * failure:
11814 *==========================================================================*/
11815int QCamera3HardwareInterface::translateToHalMetadata
11816 (const camera3_capture_request_t *request,
11817 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011818 uint32_t snapshotStreamId) {
11819 if (request == nullptr || hal_metadata == nullptr) {
11820 return BAD_VALUE;
11821 }
11822
11823 int64_t minFrameDuration = getMinFrameDuration(request);
11824
11825 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11826 minFrameDuration);
11827}
11828
11829int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11830 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11831 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11832
Thierry Strudel3d639192016-09-09 11:52:26 -070011833 int rc = 0;
11834 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011835 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011836
11837 /* Do not change the order of the following list unless you know what you are
11838 * doing.
11839 * The order is laid out in such a way that parameters in the front of the table
11840 * may be used to override the parameters later in the table. Examples are:
11841 * 1. META_MODE should precede AEC/AWB/AF MODE
11842 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11843 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11844 * 4. Any mode should precede its corresponding settings
11845 */
11846 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11847 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11849 rc = BAD_VALUE;
11850 }
11851 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11852 if (rc != NO_ERROR) {
11853 LOGE("extractSceneMode failed");
11854 }
11855 }
11856
11857 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11858 uint8_t fwk_aeMode =
11859 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11860 uint8_t aeMode;
11861 int32_t redeye;
11862
11863 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11864 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011865 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11866 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011867 } else {
11868 aeMode = CAM_AE_MODE_ON;
11869 }
11870 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11871 redeye = 1;
11872 } else {
11873 redeye = 0;
11874 }
11875
11876 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11877 fwk_aeMode);
11878 if (NAME_NOT_FOUND != val) {
11879 int32_t flashMode = (int32_t)val;
11880 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11881 }
11882
11883 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11885 rc = BAD_VALUE;
11886 }
11887 }
11888
11889 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11890 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11891 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11892 fwk_whiteLevel);
11893 if (NAME_NOT_FOUND != val) {
11894 uint8_t whiteLevel = (uint8_t)val;
11895 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11896 rc = BAD_VALUE;
11897 }
11898 }
11899 }
11900
11901 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11902 uint8_t fwk_cacMode =
11903 frame_settings.find(
11904 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11905 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11906 fwk_cacMode);
11907 if (NAME_NOT_FOUND != val) {
11908 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11909 bool entryAvailable = FALSE;
11910 // Check whether Frameworks set CAC mode is supported in device or not
11911 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11912 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11913 entryAvailable = TRUE;
11914 break;
11915 }
11916 }
11917 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11918            // If the entry is not found, set the device-supported mode instead of the framework's mode, i.e.,
11919 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11920 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11921 if (entryAvailable == FALSE) {
11922 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11923 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11924 } else {
11925 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11926                        // HIGH is not supported, so fall back to FAST: the spec says the
11927                        // underlying device implementation may be the same for both modes.
11928 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11929 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11930                        // FAST is not supported; rather than falling back to HIGH
11931                        // (which could cost fps due to higher quality), choose OFF.
11932 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11933 } else {
11934 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11935 }
11936 }
11937 }
11938 LOGD("Final cacMode is %d", cacMode);
11939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11940 rc = BAD_VALUE;
11941 }
11942 } else {
11943 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11944 }
11945 }
11946
Thierry Strudel2896d122017-02-23 19:18:03 -080011947 char af_value[PROPERTY_VALUE_MAX];
11948 property_get("persist.camera.af.infinity", af_value, "0");
11949
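    // Debug aid (behavior of the block below): setting the persist.camera.af.infinity
    // property to a non-zero value, e.g. "adb shell setprop persist.camera.af.infinity 1",
    // forces CAM_FOCUS_MODE_INFINITY and ignores the framework android.control.afMode.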
Jason Lee84ae9972017-02-24 13:24:24 -080011950 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011951 if (atoi(af_value) == 0) {
11952 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011953 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011954 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11955 fwk_focusMode);
11956 if (NAME_NOT_FOUND != val) {
11957 uint8_t focusMode = (uint8_t)val;
11958 LOGD("set focus mode %d", focusMode);
11959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11960 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11961 rc = BAD_VALUE;
11962 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011963 }
11964 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011965 } else {
11966 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11967 LOGE("Focus forced to infinity %d", focusMode);
11968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11969 rc = BAD_VALUE;
11970 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011971 }
11972
Jason Lee84ae9972017-02-24 13:24:24 -080011973 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11974 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011975 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11976 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11977 focalDistance)) {
11978 rc = BAD_VALUE;
11979 }
11980 }
11981
11982 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11983 uint8_t fwk_antibandingMode =
11984 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11985 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11986 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11987 if (NAME_NOT_FOUND != val) {
11988 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011989 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11990 if (m60HzZone) {
11991 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11992 } else {
11993 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11994 }
11995 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011996 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11997 hal_antibandingMode)) {
11998 rc = BAD_VALUE;
11999 }
12000 }
12001 }
12002
12003 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12004 int32_t expCompensation = frame_settings.find(
12005 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12006 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12007 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12008 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12009 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012010 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12012 expCompensation)) {
12013 rc = BAD_VALUE;
12014 }
12015 }
12016
12017 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12018 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12020 rc = BAD_VALUE;
12021 }
12022 }
12023 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12024 rc = setHalFpsRange(frame_settings, hal_metadata);
12025 if (rc != NO_ERROR) {
12026 LOGE("setHalFpsRange failed");
12027 }
12028 }
12029
12030 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12031 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12033 rc = BAD_VALUE;
12034 }
12035 }
12036
12037 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12038 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12039 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12040 fwk_effectMode);
12041 if (NAME_NOT_FOUND != val) {
12042 uint8_t effectMode = (uint8_t)val;
12043 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12044 rc = BAD_VALUE;
12045 }
12046 }
12047 }
12048
12049 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12050 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12051 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12052 colorCorrectMode)) {
12053 rc = BAD_VALUE;
12054 }
12055 }
12056
12057 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12058 cam_color_correct_gains_t colorCorrectGains;
12059 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12060 colorCorrectGains.gains[i] =
12061 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12062 }
12063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12064 colorCorrectGains)) {
12065 rc = BAD_VALUE;
12066 }
12067 }
12068
12069 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12070 cam_color_correct_matrix_t colorCorrectTransform;
12071 cam_rational_type_t transform_elem;
12072 size_t num = 0;
12073 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12074 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12075 transform_elem.numerator =
12076 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12077 transform_elem.denominator =
12078 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12079 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12080 num++;
12081 }
12082 }
12083 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12084 colorCorrectTransform)) {
12085 rc = BAD_VALUE;
12086 }
12087 }
12088
12089 cam_trigger_t aecTrigger;
12090 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12091 aecTrigger.trigger_id = -1;
12092 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12093 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12094 aecTrigger.trigger =
12095 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12096 aecTrigger.trigger_id =
12097 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12099 aecTrigger)) {
12100 rc = BAD_VALUE;
12101 }
12102 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12103 aecTrigger.trigger, aecTrigger.trigger_id);
12104 }
12105
12106 /*af_trigger must come with a trigger id*/
12107 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12108 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12109 cam_trigger_t af_trigger;
12110 af_trigger.trigger =
12111 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12112 af_trigger.trigger_id =
12113 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12115 rc = BAD_VALUE;
12116 }
12117 LOGD("AfTrigger: %d AfTriggerID: %d",
12118 af_trigger.trigger, af_trigger.trigger_id);
12119 }
12120
12121 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12122 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12124 rc = BAD_VALUE;
12125 }
12126 }
12127 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12128 cam_edge_application_t edge_application;
12129 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012130
Thierry Strudel3d639192016-09-09 11:52:26 -070012131 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12132 edge_application.sharpness = 0;
12133 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012134 edge_application.sharpness =
12135 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12136 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12137 int32_t sharpness =
12138 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12139 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12140 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12141 LOGD("Setting edge mode sharpness %d", sharpness);
12142 edge_application.sharpness = sharpness;
12143 }
12144 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012145 }
12146 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12147 rc = BAD_VALUE;
12148 }
12149 }
12150
12151 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12152 int32_t respectFlashMode = 1;
12153 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12154 uint8_t fwk_aeMode =
12155 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012156 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12157 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12158 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012159 respectFlashMode = 0;
12160 LOGH("AE Mode controls flash, ignore android.flash.mode");
12161 }
12162 }
12163 if (respectFlashMode) {
12164 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12165 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12166 LOGH("flash mode after mapping %d", val);
12167 // To check: CAM_INTF_META_FLASH_MODE usage
12168 if (NAME_NOT_FOUND != val) {
12169 uint8_t flashMode = (uint8_t)val;
12170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12171 rc = BAD_VALUE;
12172 }
12173 }
12174 }
12175 }
12176
12177 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12178 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12179 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12180 rc = BAD_VALUE;
12181 }
12182 }
12183
12184 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12185 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12186 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12187 flashFiringTime)) {
12188 rc = BAD_VALUE;
12189 }
12190 }
12191
12192 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12193 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12195 hotPixelMode)) {
12196 rc = BAD_VALUE;
12197 }
12198 }
12199
12200 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12201 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12203 lensAperture)) {
12204 rc = BAD_VALUE;
12205 }
12206 }
12207
12208 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12209 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12210 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12211 filterDensity)) {
12212 rc = BAD_VALUE;
12213 }
12214 }
12215
12216 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12217 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12219 focalLength)) {
12220 rc = BAD_VALUE;
12221 }
12222 }
12223
12224 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12225 uint8_t optStabMode =
12226 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12228 optStabMode)) {
12229 rc = BAD_VALUE;
12230 }
12231 }
12232
12233 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12234 uint8_t videoStabMode =
12235 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12236 LOGD("videoStabMode from APP = %d", videoStabMode);
12237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12238 videoStabMode)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242
12243
12244 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12245 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12247 noiseRedMode)) {
12248 rc = BAD_VALUE;
12249 }
12250 }
12251
12252 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12253 float reprocessEffectiveExposureFactor =
12254 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12256 reprocessEffectiveExposureFactor)) {
12257 rc = BAD_VALUE;
12258 }
12259 }
12260
12261 cam_crop_region_t scalerCropRegion;
12262 bool scalerCropSet = false;
12263 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12264 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12265 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12266 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12267 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12268
12269 // Map coordinate system from active array to sensor output.
12270 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12271 scalerCropRegion.width, scalerCropRegion.height);
12272
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12274 scalerCropRegion)) {
12275 rc = BAD_VALUE;
12276 }
12277 scalerCropSet = true;
12278 }
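    // Note: mCropRegionMapper.toSensor() translates the framework's active-array
    // coordinates into sensor-output coordinates; the same mapping is applied to the
    // AE/AF regions further below before they are compared against this crop region.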
12279
12280 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12281 int64_t sensorExpTime =
12282 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12283 LOGD("setting sensorExpTime %lld", sensorExpTime);
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12285 sensorExpTime)) {
12286 rc = BAD_VALUE;
12287 }
12288 }
12289
12290 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12291 int64_t sensorFrameDuration =
12292 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012293 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12294 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12295 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12296 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12298 sensorFrameDuration)) {
12299 rc = BAD_VALUE;
12300 }
12301 }
12302
12303 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12304 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12305 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12306 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12307 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12308 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12309 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12311 sensorSensitivity)) {
12312 rc = BAD_VALUE;
12313 }
12314 }
12315
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012316#ifndef USE_HAL_3_3
12317 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12318 int32_t ispSensitivity =
12319 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12320 if (ispSensitivity <
12321 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12322 ispSensitivity =
12323 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12324 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12325 }
12326 if (ispSensitivity >
12327 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12328 ispSensitivity =
12329 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12330 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12331 }
12332 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12333 ispSensitivity)) {
12334 rc = BAD_VALUE;
12335 }
12336 }
12337#endif
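    // Note: the post-RAW sensitivity boost block above is compiled out when USE_HAL_3_3
    // is defined, as ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST postdates HAL v3.3.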
12338
Thierry Strudel3d639192016-09-09 11:52:26 -070012339 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12340 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12341 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12342 rc = BAD_VALUE;
12343 }
12344 }
12345
12346 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12347 uint8_t fwk_facedetectMode =
12348 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12349
12350 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12351 fwk_facedetectMode);
12352
12353 if (NAME_NOT_FOUND != val) {
12354 uint8_t facedetectMode = (uint8_t)val;
12355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12356 facedetectMode)) {
12357 rc = BAD_VALUE;
12358 }
12359 }
12360 }
12361
Thierry Strudel54dc9782017-02-15 12:12:10 -080012362 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012363 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012364 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12366 histogramMode)) {
12367 rc = BAD_VALUE;
12368 }
12369 }
12370
12371 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12372 uint8_t sharpnessMapMode =
12373 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12374 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12375 sharpnessMapMode)) {
12376 rc = BAD_VALUE;
12377 }
12378 }
12379
12380 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12381 uint8_t tonemapMode =
12382 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12384 rc = BAD_VALUE;
12385 }
12386 }
12387 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12388 /*All tonemap channels will have the same number of points*/
12389 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12390 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12391 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12392 cam_rgb_tonemap_curves tonemapCurves;
12393 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12394 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12395 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12396 tonemapCurves.tonemap_points_cnt,
12397 CAM_MAX_TONEMAP_CURVE_SIZE);
12398 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12399 }
12400
12401 /* ch0 = G*/
12402 size_t point = 0;
12403 cam_tonemap_curve_t tonemapCurveGreen;
12404 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12405 for (size_t j = 0; j < 2; j++) {
12406 tonemapCurveGreen.tonemap_points[i][j] =
12407 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12408 point++;
12409 }
12410 }
12411 tonemapCurves.curves[0] = tonemapCurveGreen;
12412
12413 /* ch 1 = B */
12414 point = 0;
12415 cam_tonemap_curve_t tonemapCurveBlue;
12416 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12417 for (size_t j = 0; j < 2; j++) {
12418 tonemapCurveBlue.tonemap_points[i][j] =
12419 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12420 point++;
12421 }
12422 }
12423 tonemapCurves.curves[1] = tonemapCurveBlue;
12424
12425 /* ch 2 = R */
12426 point = 0;
12427 cam_tonemap_curve_t tonemapCurveRed;
12428 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12429 for (size_t j = 0; j < 2; j++) {
12430 tonemapCurveRed.tonemap_points[i][j] =
12431 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12432 point++;
12433 }
12434 }
12435 tonemapCurves.curves[2] = tonemapCurveRed;
12436
12437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12438 tonemapCurves)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
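    // Note: each ANDROID_TONEMAP_CURVE_* array is a flat list of interleaved
    // (Pin, Pout) pairs, which is why the loops above consume two floats per point
    // and tonemap_points_cnt is count/2, capped at CAM_MAX_TONEMAP_CURVE_SIZE.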
12442
12443 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12444 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12445 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12446 captureIntent)) {
12447 rc = BAD_VALUE;
12448 }
12449 }
12450
12451 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12452 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12454 blackLevelLock)) {
12455 rc = BAD_VALUE;
12456 }
12457 }
12458
12459 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12460 uint8_t lensShadingMapMode =
12461 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12462 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12463 lensShadingMapMode)) {
12464 rc = BAD_VALUE;
12465 }
12466 }
12467
12468 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12469 cam_area_t roi;
12470 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012471 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012472
12473 // Map coordinate system from active array to sensor output.
12474 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12475 roi.rect.height);
12476
12477 if (scalerCropSet) {
12478 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12479 }
12480 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12481 rc = BAD_VALUE;
12482 }
12483 }
12484
12485 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12486 cam_area_t roi;
12487 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012488 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012489
12490 // Map coordinate system from active array to sensor output.
12491 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12492 roi.rect.height);
12493
12494 if (scalerCropSet) {
12495 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12496 }
12497 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12498 rc = BAD_VALUE;
12499 }
12500 }
12501
12502 // CDS for non-HFR non-video mode
12503 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12504 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12505 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12506 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12507 LOGE("Invalid CDS mode %d!", *fwk_cds);
12508 } else {
12509 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12510 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12511 rc = BAD_VALUE;
12512 }
12513 }
12514 }
12515
Thierry Strudel04e026f2016-10-10 11:27:36 -070012516 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012517 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012518 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012519 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12520 }
12521 if (m_bVideoHdrEnabled)
12522 vhdr = CAM_VIDEO_HDR_MODE_ON;
12523
Thierry Strudel54dc9782017-02-15 12:12:10 -080012524 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12525
12526 if(vhdr != curr_hdr_state)
12527 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12528
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012529 rc = setVideoHdrMode(mParameters, vhdr);
12530 if (rc != NO_ERROR) {
12531        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012532 }
12533
12534 //IR
12535 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12536 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12537 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012538 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12539 uint8_t isIRon = 0;
12540
12541        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012542 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12543 LOGE("Invalid IR mode %d!", fwk_ir);
12544 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012545 if(isIRon != curr_ir_state )
12546 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12547
Thierry Strudel04e026f2016-10-10 11:27:36 -070012548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12549 CAM_INTF_META_IR_MODE, fwk_ir)) {
12550 rc = BAD_VALUE;
12551 }
12552 }
12553 }
12554
Thierry Strudel54dc9782017-02-15 12:12:10 -080012555 //Binning Correction Mode
12556 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12557 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12558 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12559 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12560 || (0 > fwk_binning_correction)) {
12561 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12562 } else {
12563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12564 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12565 rc = BAD_VALUE;
12566 }
12567 }
12568 }
12569
Thierry Strudel269c81a2016-10-12 12:13:59 -070012570 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12571 float aec_speed;
12572 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12573 LOGD("AEC Speed :%f", aec_speed);
12574 if ( aec_speed < 0 ) {
12575            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12576 } else {
12577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12578 aec_speed)) {
12579 rc = BAD_VALUE;
12580 }
12581 }
12582 }
12583
12584 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12585 float awb_speed;
12586 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12587 LOGD("AWB Speed :%f", awb_speed);
12588 if ( awb_speed < 0 ) {
12589            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12590 } else {
12591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12592 awb_speed)) {
12593 rc = BAD_VALUE;
12594 }
12595 }
12596 }
12597
Thierry Strudel3d639192016-09-09 11:52:26 -070012598 // TNR
12599 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12600 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12601 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012602 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012603 cam_denoise_param_t tnr;
12604 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12605 tnr.process_plates =
12606 (cam_denoise_process_type_t)frame_settings.find(
12607 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12608 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012609
12610 if(b_TnrRequested != curr_tnr_state)
12611 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12612
Thierry Strudel3d639192016-09-09 11:52:26 -070012613 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12614 rc = BAD_VALUE;
12615 }
12616 }
12617
Thierry Strudel54dc9782017-02-15 12:12:10 -080012618 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012619 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012620 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12622 *exposure_metering_mode)) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
Thierry Strudel3d639192016-09-09 11:52:26 -070012627 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12628 int32_t fwk_testPatternMode =
12629 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12630 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12631 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12632
12633 if (NAME_NOT_FOUND != testPatternMode) {
12634 cam_test_pattern_data_t testPatternData;
12635 memset(&testPatternData, 0, sizeof(testPatternData));
12636 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12637 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12638 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12639 int32_t *fwk_testPatternData =
12640 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12641 testPatternData.r = fwk_testPatternData[0];
12642 testPatternData.b = fwk_testPatternData[3];
12643 switch (gCamCapability[mCameraId]->color_arrangement) {
12644 case CAM_FILTER_ARRANGEMENT_RGGB:
12645 case CAM_FILTER_ARRANGEMENT_GRBG:
12646 testPatternData.gr = fwk_testPatternData[1];
12647 testPatternData.gb = fwk_testPatternData[2];
12648 break;
12649 case CAM_FILTER_ARRANGEMENT_GBRG:
12650 case CAM_FILTER_ARRANGEMENT_BGGR:
12651 testPatternData.gr = fwk_testPatternData[2];
12652 testPatternData.gb = fwk_testPatternData[1];
12653 break;
12654 default:
12655 LOGE("color arrangement %d is not supported",
12656 gCamCapability[mCameraId]->color_arrangement);
12657 break;
12658 }
12659 }
12660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12661 testPatternData)) {
12662 rc = BAD_VALUE;
12663 }
12664 } else {
12665 LOGE("Invalid framework sensor test pattern mode %d",
12666 fwk_testPatternMode);
12667 }
12668 }
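    // Example (illustrative): for a SOLID_COLOR test pattern the framework supplies
    // android.sensor.testPatternData as [R, Gr, Gb, B]; R and B map directly, while
    // the switch above swaps Gr/Gb for GBRG/BGGR color filter arrangements, presumably
    // so the values land on the matching green channels of the native pattern.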
12669
12670 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12671 size_t count = 0;
12672 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12673 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12674 gps_coords.data.d, gps_coords.count, count);
12675 if (gps_coords.count != count) {
12676 rc = BAD_VALUE;
12677 }
12678 }
12679
12680 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12681 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12682 size_t count = 0;
12683 const char *gps_methods_src = (const char *)
12684 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12685 memset(gps_methods, '\0', sizeof(gps_methods));
12686 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12687 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12688 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12689 if (GPS_PROCESSING_METHOD_SIZE != count) {
12690 rc = BAD_VALUE;
12691 }
12692 }
12693
12694 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12695 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12697 gps_timestamp)) {
12698 rc = BAD_VALUE;
12699 }
12700 }
12701
12702 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12703 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12704 cam_rotation_info_t rotation_info;
12705 if (orientation == 0) {
12706 rotation_info.rotation = ROTATE_0;
12707 } else if (orientation == 90) {
12708 rotation_info.rotation = ROTATE_90;
12709 } else if (orientation == 180) {
12710 rotation_info.rotation = ROTATE_180;
12711 } else if (orientation == 270) {
12712 rotation_info.rotation = ROTATE_270;
12713        } else {
            // Defensive default: android.jpeg.orientation is expected to be 0/90/180/270;
            // avoid leaving rotation uninitialized if an unexpected value slips through.
            rotation_info.rotation = ROTATE_0;
        }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012714 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012715 rotation_info.streamId = snapshotStreamId;
12716 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12718 rc = BAD_VALUE;
12719 }
12720 }
12721
12722 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12723 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12724 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12725 rc = BAD_VALUE;
12726 }
12727 }
12728
12729 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12730 uint32_t thumb_quality = (uint32_t)
12731 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12732 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12733 thumb_quality)) {
12734 rc = BAD_VALUE;
12735 }
12736 }
12737
12738 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12739 cam_dimension_t dim;
12740 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12741 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12742 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12743 rc = BAD_VALUE;
12744 }
12745 }
12746
12747 // Internal metadata
12748 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12749 size_t count = 0;
12750 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12751 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12752 privatedata.data.i32, privatedata.count, count);
12753 if (privatedata.count != count) {
12754 rc = BAD_VALUE;
12755 }
12756 }
12757
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012758 // ISO/Exposure Priority
12759 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12760 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12761 cam_priority_mode_t mode =
12762 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12763 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12764 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12765 use_iso_exp_pty.previewOnly = FALSE;
12766 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12767 use_iso_exp_pty.value = *ptr;
12768
12769 if(CAM_ISO_PRIORITY == mode) {
12770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12771 use_iso_exp_pty)) {
12772 rc = BAD_VALUE;
12773 }
12774 }
12775 else {
12776 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12777 use_iso_exp_pty)) {
12778 rc = BAD_VALUE;
12779 }
12780 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012781
12782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12783 rc = BAD_VALUE;
12784 }
12785 }
12786 } else {
12787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12788 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012789 }
12790 }
12791
12792 // Saturation
12793 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12794 int32_t* use_saturation =
12795 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12797 rc = BAD_VALUE;
12798 }
12799 }
12800
Thierry Strudel3d639192016-09-09 11:52:26 -070012801 // EV step
12802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12803 gCamCapability[mCameraId]->exp_compensation_step)) {
12804 rc = BAD_VALUE;
12805 }
12806
12807 // CDS info
12808 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12809 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12810 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12811
12812 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12813 CAM_INTF_META_CDS_DATA, *cdsData)) {
12814 rc = BAD_VALUE;
12815 }
12816 }
12817
Shuzhen Wang19463d72016-03-08 11:09:52 -080012818 // Hybrid AE
12819 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12820 uint8_t *hybrid_ae = (uint8_t *)
12821 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12822
12823 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12824 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12825 rc = BAD_VALUE;
12826 }
12827 }
12828
Shuzhen Wang14415f52016-11-16 18:26:18 -080012829 // Histogram
12830 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12831 uint8_t histogramMode =
12832 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12834 histogramMode)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838
12839 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12840 int32_t histogramBins =
12841 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12843 histogramBins)) {
12844 rc = BAD_VALUE;
12845 }
12846 }
12847
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012848 // Tracking AF
12849 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12850 uint8_t trackingAfTrigger =
12851 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12853 trackingAfTrigger)) {
12854 rc = BAD_VALUE;
12855 }
12856 }
12857
Thierry Strudel3d639192016-09-09 11:52:26 -070012858 return rc;
12859}
12860
12861/*===========================================================================
12862 * FUNCTION : captureResultCb
12863 *
12864 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12865 *
12866 * PARAMETERS :
12867 * @frame : frame information from mm-camera-interface
12868 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12869 * @userdata: userdata
12870 *
12871 * RETURN : NONE
12872 *==========================================================================*/
12873void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12874 camera3_stream_buffer_t *buffer,
12875 uint32_t frame_number, bool isInputBuffer, void *userdata)
12876{
12877 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12878 if (hw == NULL) {
12879 LOGE("Invalid hw %p", hw);
12880 return;
12881 }
12882
12883 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12884 return;
12885}
12886
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012887/*===========================================================================
12888 * FUNCTION : setBufferErrorStatus
12889 *
12890 * DESCRIPTION: Callback handler for channels to report any buffer errors
12891 *
12892 * PARAMETERS :
12893 * @ch : Channel on which buffer error is reported from
12894 * @frame_number : frame number on which buffer error is reported on
12895 * @buffer_status : buffer error status
12896 * @userdata: userdata
12897 *
12898 * RETURN : NONE
12899 *==========================================================================*/
12900void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12901 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12902{
12903 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12904 if (hw == NULL) {
12905 LOGE("Invalid hw %p", hw);
12906 return;
12907 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012908
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012909 hw->setBufferErrorStatus(ch, frame_number, err);
12910 return;
12911}
12912
12913void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12914 uint32_t frameNumber, camera3_buffer_status_t err)
12915{
12916 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12917 pthread_mutex_lock(&mMutex);
12918
12919 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12920 if (req.frame_number != frameNumber)
12921 continue;
12922 for (auto& k : req.mPendingBufferList) {
12923 if(k.stream->priv == ch) {
12924 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12925 }
12926 }
12927 }
12928
12929 pthread_mutex_unlock(&mMutex);
12930 return;
12931}
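
/* Note (hedged): this only records the error state under mMutex; the flagged buffers
 * are expected to be returned to the framework with CAMERA3_BUFFER_STATUS_ERROR when
 * the capture result for that frame number is eventually sent. */
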
Thierry Strudel3d639192016-09-09 11:52:26 -070012932/*===========================================================================
12933 * FUNCTION : initialize
12934 *
12935 * DESCRIPTION: Pass framework callback pointers to HAL
12936 *
12937 * PARAMETERS :
12938 *
12939 *
12940 * RETURN : Success : 0
12941 * Failure: -ENODEV
12942 *==========================================================================*/
12943
12944int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12945 const camera3_callback_ops_t *callback_ops)
12946{
12947 LOGD("E");
12948 QCamera3HardwareInterface *hw =
12949 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12950 if (!hw) {
12951 LOGE("NULL camera device");
12952 return -ENODEV;
12953 }
12954
12955 int rc = hw->initialize(callback_ops);
12956 LOGD("X");
12957 return rc;
12958}
12959
12960/*===========================================================================
12961 * FUNCTION : configure_streams
12962 *
12963 * DESCRIPTION:
12964 *
12965 * PARAMETERS :
12966 *
12967 *
12968 * RETURN : Success: 0
12969 * Failure: -EINVAL (if stream configuration is invalid)
12970 * -ENODEV (fatal error)
12971 *==========================================================================*/
12972
12973int QCamera3HardwareInterface::configure_streams(
12974 const struct camera3_device *device,
12975 camera3_stream_configuration_t *stream_list)
12976{
12977 LOGD("E");
12978 QCamera3HardwareInterface *hw =
12979 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12980 if (!hw) {
12981 LOGE("NULL camera device");
12982 return -ENODEV;
12983 }
12984 int rc = hw->configureStreams(stream_list);
12985 LOGD("X");
12986 return rc;
12987}
12988
12989/*===========================================================================
12990 * FUNCTION : construct_default_request_settings
12991 *
12992 * DESCRIPTION: Configure a settings buffer to meet the required use case
12993 *
12994 * PARAMETERS :
12995 *
12996 *
12997 * RETURN : Success: Return valid metadata
12998 * Failure: Return NULL
12999 *==========================================================================*/
13000const camera_metadata_t* QCamera3HardwareInterface::
13001 construct_default_request_settings(const struct camera3_device *device,
13002 int type)
13003{
13004
13005 LOGD("E");
13006 camera_metadata_t* fwk_metadata = NULL;
13007 QCamera3HardwareInterface *hw =
13008 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13009 if (!hw) {
13010 LOGE("NULL camera device");
13011 return NULL;
13012 }
13013
13014 fwk_metadata = hw->translateCapabilityToMetadata(type);
13015
13016 LOGD("X");
13017 return fwk_metadata;
13018}
13019
13020/*===========================================================================
13021 * FUNCTION : process_capture_request
13022 *
13023 * DESCRIPTION:
13024 *
13025 * PARAMETERS :
13026 *
13027 *
13028 * RETURN :
13029 *==========================================================================*/
13030int QCamera3HardwareInterface::process_capture_request(
13031 const struct camera3_device *device,
13032 camera3_capture_request_t *request)
13033{
13034 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013035 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013036 QCamera3HardwareInterface *hw =
13037 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13038 if (!hw) {
13039 LOGE("NULL camera device");
13040 return -EINVAL;
13041 }
13042
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013043 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013044 LOGD("X");
13045 return rc;
13046}
13047
13048/*===========================================================================
13049 * FUNCTION : dump
13050 *
13051 * DESCRIPTION:
13052 *
13053 * PARAMETERS :
13054 *
13055 *
13056 * RETURN :
13057 *==========================================================================*/
13058
13059void QCamera3HardwareInterface::dump(
13060 const struct camera3_device *device, int fd)
13061{
13062 /* Log level property is read when "adb shell dumpsys media.camera" is
13063 called so that the log level can be controlled without restarting
13064 the media server */
13065 getLogLevel();
13066
13067 LOGD("E");
13068 QCamera3HardwareInterface *hw =
13069 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13070 if (!hw) {
13071 LOGE("NULL camera device");
13072 return;
13073 }
13074
13075 hw->dump(fd);
13076 LOGD("X");
13077 return;
13078}
13079
13080/*===========================================================================
13081 * FUNCTION : flush
13082 *
13083 * DESCRIPTION:
13084 *
13085 * PARAMETERS :
13086 *
13087 *
13088 * RETURN :
13089 *==========================================================================*/
13090
13091int QCamera3HardwareInterface::flush(
13092 const struct camera3_device *device)
13093{
13094 int rc;
13095 LOGD("E");
13096 QCamera3HardwareInterface *hw =
13097 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13098 if (!hw) {
13099 LOGE("NULL camera device");
13100 return -EINVAL;
13101 }
13102
13103 pthread_mutex_lock(&hw->mMutex);
13104 // Validate current state
13105 switch (hw->mState) {
13106 case STARTED:
13107 /* valid state */
13108 break;
13109
13110 case ERROR:
13111 pthread_mutex_unlock(&hw->mMutex);
13112 hw->handleCameraDeviceError();
13113 return -ENODEV;
13114
13115 default:
13116 LOGI("Flush returned during state %d", hw->mState);
13117 pthread_mutex_unlock(&hw->mMutex);
13118 return 0;
13119 }
13120 pthread_mutex_unlock(&hw->mMutex);
13121
13122 rc = hw->flush(true /* restart channels */ );
13123 LOGD("X");
13124 return rc;
13125}
13126
13127/*===========================================================================
13128 * FUNCTION : close_camera_device
13129 *
13130 * DESCRIPTION: Close the camera device and release the HAL instance
13131 *
13132 * PARAMETERS :
13133 *   @device : hw device structure of the camera to close
13134 *
13135 * RETURN : NO_ERROR on success, BAD_VALUE if device is NULL
13136 *==========================================================================*/
13137int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13138{
13139 int ret = NO_ERROR;
13140 QCamera3HardwareInterface *hw =
13141 reinterpret_cast<QCamera3HardwareInterface *>(
13142 reinterpret_cast<camera3_device_t *>(device)->priv);
13143 if (!hw) {
13144 LOGE("NULL camera device");
13145 return BAD_VALUE;
13146 }
13147
13148 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13149 delete hw;
13150 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013151 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013152 return ret;
13153}
13154
13155/*===========================================================================
13156 * FUNCTION : getWaveletDenoiseProcessPlate
13157 *
13158 * DESCRIPTION: query wavelet denoise process plate
13159 *
13160 * PARAMETERS : None
13161 *
13162 * RETURN : WNR process plate value
13163 *==========================================================================*/
13164cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13165{
13166 char prop[PROPERTY_VALUE_MAX];
13167 memset(prop, 0, sizeof(prop));
13168 property_get("persist.denoise.process.plates", prop, "0");
13169 int processPlate = atoi(prop);
13170 switch(processPlate) {
13171 case 0:
13172 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13173 case 1:
13174 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13175 case 2:
13176 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13177 case 3:
13178 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13179 default:
13180 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13181 }
13182}
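/*
 * Illustrative usage sketch (not part of the build): how a caller might feed
 * the plate returned above into the batch-parameter macro used throughout
 * this file. CAM_INTF_PARM_WAVELET_DENOISE and the cam_denoise_param_t field
 * names are assumptions for illustration only.
 *
 *     cam_denoise_param_t wnr;
 *     memset(&wnr, 0, sizeof(wnr));
 *     wnr.denoise_enable = 1;
 *     wnr.process_plates = getWaveletDenoiseProcessPlate();
 *     if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
 *             CAM_INTF_PARM_WAVELET_DENOISE, wnr)) {
 *         rc = BAD_VALUE;
 *     }
 */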
13183
13184
13185/*===========================================================================
13186 * FUNCTION : getTemporalDenoiseProcessPlate
13187 *
13188 * DESCRIPTION: query temporal denoise process plate
13189 *
13190 * PARAMETERS : None
13191 *
13192 * RETURN : TNR process plate value
13193 *==========================================================================*/
13194cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13195{
13196 char prop[PROPERTY_VALUE_MAX];
13197 memset(prop, 0, sizeof(prop));
13198 property_get("persist.tnr.process.plates", prop, "0");
13199 int processPlate = atoi(prop);
13200 switch(processPlate) {
13201 case 0:
13202 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13203 case 1:
13204 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13205 case 2:
13206 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13207 case 3:
13208 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13209 default:
13210 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13211 }
13212}
13213
13214
13215/*===========================================================================
13216 * FUNCTION : extractSceneMode
13217 *
13218 * DESCRIPTION: Extract scene mode from frameworks set metadata
13219 *
13220 * PARAMETERS :
13221 * @frame_settings: CameraMetadata reference
13222 * @metaMode: ANDROID_CONTROL_MODE value
13223 * @hal_metadata: hal metadata structure
13224 *
13225 * RETURN : int32_t type of status (NO_ERROR on success)
13226 *==========================================================================*/
13227int32_t QCamera3HardwareInterface::extractSceneMode(
13228 const CameraMetadata &frame_settings, uint8_t metaMode,
13229 metadata_buffer_t *hal_metadata)
13230{
13231 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013232 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13233
13234 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13235 LOGD("Ignoring control mode OFF_KEEP_STATE");
13236 return NO_ERROR;
13237 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013238
13239 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13240 camera_metadata_ro_entry entry =
13241 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13242 if (0 == entry.count)
13243 return rc;
13244
13245 uint8_t fwk_sceneMode = entry.data.u8[0];
13246
13247 int val = lookupHalName(SCENE_MODES_MAP,
13248 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13249 fwk_sceneMode);
13250 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013251 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013252 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013253 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013254 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013255
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013256 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13257 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13258 }
13259
13260 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13261 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013262 cam_hdr_param_t hdr_params;
13263 hdr_params.hdr_enable = 1;
13264 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13265 hdr_params.hdr_need_1x = false;
13266 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13267 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13268 rc = BAD_VALUE;
13269 }
13270 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013271
Thierry Strudel3d639192016-09-09 11:52:26 -070013272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13273 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13274 rc = BAD_VALUE;
13275 }
13276 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013277
13278 if (mForceHdrSnapshot) {
13279 cam_hdr_param_t hdr_params;
13280 hdr_params.hdr_enable = 1;
13281 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13282 hdr_params.hdr_need_1x = false;
13283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13284 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13285 rc = BAD_VALUE;
13286 }
13287 }
13288
Thierry Strudel3d639192016-09-09 11:52:26 -070013289 return rc;
13290}
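/*
 * Illustrative sketch (assumed call pattern, not part of the build): how the
 * per-request translation path might invoke extractSceneMode(). Here
 * `frame_settings` is the request's CameraMetadata and `mParameters` the HAL
 * batch buffer used elsewhere in this file.
 *
 *     if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
 *         uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
 *         rc = extractSceneMode(frame_settings, metaMode, mParameters);
 *         if (rc != NO_ERROR) {
 *             LOGE("extractSceneMode failed");
 *         }
 *     }
 */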
13291
13292/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013293 * FUNCTION : setVideoHdrMode
13294 *
13295 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13296 *
13297 * PARAMETERS :
13298 * @hal_metadata: hal metadata structure
13299 * @vhdr: requested video HDR mode (from QCAMERA3_VIDEO_HDR_MODE)
13300 *
13301 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE otherwise)
13302 *==========================================================================*/
13303int32_t QCamera3HardwareInterface::setVideoHdrMode(
13304 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13305{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013306 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13307 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13308 }
13309
13310 LOGE("Invalid Video HDR mode %d!", vhdr);
13311 return BAD_VALUE;
13312}
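/*
 * Illustrative sketch (assumption, not part of the build): consuming the
 * QCAMERA3_VIDEO_HDR_MODE vendor tag from a request and forwarding it to
 * setVideoHdrMode().
 *
 *     if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
 *         cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
 *                 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
 *         rc = setVideoHdrMode(mParameters, vhdr);
 *         if (rc != NO_ERROR) {
 *             LOGE("setVideoHdrMode failed");
 *         }
 *     }
 */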
13313
13314/*===========================================================================
13315 * FUNCTION : setSensorHDR
13316 *
13317 * DESCRIPTION: Enable/disable sensor HDR.
13318 *
13319 * PARAMETERS :
13320 * @hal_metadata: hal metadata structure
13321 * @enable: boolean whether to enable/disable sensor HDR
13322 * @isVideoHdrEnable: whether the request comes from the video HDR path
13323 * RETURN : int32_t type of status (NO_ERROR on success)
13324 *==========================================================================*/
13325int32_t QCamera3HardwareInterface::setSensorHDR(
13326 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13327{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013328 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013329 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13330
13331 if (enable) {
13332 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13333 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13334 #ifdef _LE_CAMERA_
13335 //Default to staggered HDR for IOT
13336 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13337 #else
13338 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13339 #endif
13340 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13341 }
13342
13343 bool isSupported = false;
13344 switch (sensor_hdr) {
13345 case CAM_SENSOR_HDR_IN_SENSOR:
13346 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13347 CAM_QCOM_FEATURE_SENSOR_HDR) {
13348 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013349 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013350 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013351 break;
13352 case CAM_SENSOR_HDR_ZIGZAG:
13353 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13354 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13355 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013356 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013357 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013358 break;
13359 case CAM_SENSOR_HDR_STAGGERED:
13360 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13361 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13362 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013363 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013364 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013365 break;
13366 case CAM_SENSOR_HDR_OFF:
13367 isSupported = true;
13368 LOGD("Turning off sensor HDR");
13369 break;
13370 default:
13371 LOGE("HDR mode %d not supported", sensor_hdr);
13372 rc = BAD_VALUE;
13373 break;
13374 }
13375
13376 if(isSupported) {
13377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13378 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13379 rc = BAD_VALUE;
13380 } else {
13381 if(!isVideoHdrEnable)
13382 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013383 }
13384 }
13385 return rc;
13386}
13387
13388/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013389 * FUNCTION : needRotationReprocess
13390 *
13391 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13392 *
13393 * PARAMETERS : none
13394 *
13395 * RETURN : true: needed
13396 * false: no need
13397 *==========================================================================*/
13398bool QCamera3HardwareInterface::needRotationReprocess()
13399{
13400 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13401 // current rotation is not zero, and pp has the capability to process rotation
13402 LOGH("need do reprocess for rotation");
13403 return true;
13404 }
13405
13406 return false;
13407}
13408
13409/*===========================================================================
13410 * FUNCTION : needReprocess
13411 *
13412 * DESCRIPTION: check whether reprocess is needed for the frame
13413 *
13414 * PARAMETERS : @postprocess_mask: feature mask already applied to the frame
13415 *
13416 * RETURN : true: needed
13417 * false: no need
13418 *==========================================================================*/
13419bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13420{
13421 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13422 // TODO: add for ZSL HDR later
13423 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13424 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13425 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13426 return true;
13427 } else {
13428 LOGH("already post processed frame");
13429 return false;
13430 }
13431 }
13432 return needRotationReprocess();
13433}
13434
13435/*===========================================================================
13436 * FUNCTION : needJpegExifRotation
13437 *
13438 * DESCRIPTION: check whether rotation must be handled via JPEG EXIF
13439 *
13440 * PARAMETERS : none
13441 *
13442 * RETURN : true: needed
13443 * false: no need
13444 *==========================================================================*/
13445bool QCamera3HardwareInterface::needJpegExifRotation()
13446{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013447 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013448 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13449 LOGD("Need use Jpeg EXIF Rotation");
13450 return true;
13451 }
13452 return false;
13453}
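/*
 * Illustrative sketch (assumption, not part of the build): how the two
 * rotation predicates above are typically consulted when a request asks for
 * a rotated JPEG.
 *
 *     int32_t jpegOrientation = 90;   // e.g. from ANDROID_JPEG_ORIENTATION
 *     if (needJpegExifRotation()) {
 *         // CPP cannot rotate; record the orientation in EXIF only.
 *         LOGD("Using EXIF rotation %d", jpegOrientation);
 *     } else if (needRotationReprocess()) {
 *         // CPP rotates the pixels during reprocess; EXIF stays at 0.
 *         LOGD("Using reprocess rotation %d", jpegOrientation);
 *     }
 */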
13454
13455/*===========================================================================
13456 * FUNCTION : addOfflineReprocChannel
13457 *
13458 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13459 * coming from input channel
13460 *
13461 * PARAMETERS :
13462 * @config : reprocess configuration
13463 * @inputChHandle : pointer to the input (source) channel
13464 *
13465 *
13466 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13467 *==========================================================================*/
13468QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13469 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13470{
13471 int32_t rc = NO_ERROR;
13472 QCamera3ReprocessChannel *pChannel = NULL;
13473
13474 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013475 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13476 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013477 if (NULL == pChannel) {
13478 LOGE("no mem for reprocess channel");
13479 return NULL;
13480 }
13481
13482 rc = pChannel->initialize(IS_TYPE_NONE);
13483 if (rc != NO_ERROR) {
13484 LOGE("init reprocess channel failed, ret = %d", rc);
13485 delete pChannel;
13486 return NULL;
13487 }
13488
13489 // pp feature config
13490 cam_pp_feature_config_t pp_config;
13491 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13492
13493 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13494 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13495 & CAM_QCOM_FEATURE_DSDN) {
13496            //Use CPP CDS in case h/w supports it.
13497 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13498 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13499 }
13500 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13501 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13502 }
13503
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013504 if (config.hdr_param.hdr_enable) {
13505 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13506 pp_config.hdr_param = config.hdr_param;
13507 }
13508
13509 if (mForceHdrSnapshot) {
13510 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13511 pp_config.hdr_param.hdr_enable = 1;
13512 pp_config.hdr_param.hdr_need_1x = 0;
13513 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13514 }
13515
Thierry Strudel3d639192016-09-09 11:52:26 -070013516 rc = pChannel->addReprocStreamsFromSource(pp_config,
13517 config,
13518 IS_TYPE_NONE,
13519 mMetadataChannel);
13520
13521 if (rc != NO_ERROR) {
13522 delete pChannel;
13523 return NULL;
13524 }
13525 return pChannel;
13526}
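/*
 * Illustrative sketch (not part of the build): creating and starting an
 * offline reprocess channel. `reproc_cfg` is a populated reprocess_config_t
 * and `inputChannel` the source QCamera3ProcessingChannel; both names are
 * placeholders.
 *
 *     QCamera3ReprocessChannel *reprocChannel =
 *             addOfflineReprocChannel(reproc_cfg, inputChannel);
 *     if (reprocChannel == NULL) {
 *         LOGE("Failed to create offline reprocess channel");
 *     } else if (reprocChannel->start() != NO_ERROR) {
 *         LOGE("Failed to start offline reprocess channel");
 *         delete reprocChannel;
 *     }
 */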
13527
13528/*===========================================================================
13529 * FUNCTION : getMobicatMask
13530 *
13531 * DESCRIPTION: returns mobicat mask
13532 *
13533 * PARAMETERS : none
13534 *
13535 * RETURN : mobicat mask
13536 *
13537 *==========================================================================*/
13538uint8_t QCamera3HardwareInterface::getMobicatMask()
13539{
13540 return m_MobicatMask;
13541}
13542
13543/*===========================================================================
13544 * FUNCTION : setMobicat
13545 *
13546 * DESCRIPTION: set Mobicat on/off.
13547 *
13548 * PARAMETERS :
13549 * @params : none
13550 *
13551 * RETURN : int32_t type of status
13552 * NO_ERROR -- success
13553 *              non-zero failure code
13554 *==========================================================================*/
13555int32_t QCamera3HardwareInterface::setMobicat()
13556{
13557 char value [PROPERTY_VALUE_MAX];
13558 property_get("persist.camera.mobicat", value, "0");
13559 int32_t ret = NO_ERROR;
13560 uint8_t enableMobi = (uint8_t)atoi(value);
13561
13562 if (enableMobi) {
13563 tune_cmd_t tune_cmd;
13564 tune_cmd.type = SET_RELOAD_CHROMATIX;
13565 tune_cmd.module = MODULE_ALL;
13566 tune_cmd.value = TRUE;
13567 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13568 CAM_INTF_PARM_SET_VFE_COMMAND,
13569 tune_cmd);
13570
13571 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13572 CAM_INTF_PARM_SET_PP_COMMAND,
13573 tune_cmd);
13574 }
13575 m_MobicatMask = enableMobi;
13576
13577 return ret;
13578}
13579
13580/*===========================================================================
13581* FUNCTION : getLogLevel
13582*
13583* DESCRIPTION: Reads the log level property into a variable
13584*
13585* PARAMETERS :
13586* None
13587*
13588* RETURN :
13589* None
13590*==========================================================================*/
13591void QCamera3HardwareInterface::getLogLevel()
13592{
13593 char prop[PROPERTY_VALUE_MAX];
13594 uint32_t globalLogLevel = 0;
13595
13596 property_get("persist.camera.hal.debug", prop, "0");
13597 int val = atoi(prop);
13598 if (0 <= val) {
13599 gCamHal3LogLevel = (uint32_t)val;
13600 }
13601
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013602 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013603 gKpiDebugLevel = atoi(prop);
13604
13605 property_get("persist.camera.global.debug", prop, "0");
13606 val = atoi(prop);
13607 if (0 <= val) {
13608 globalLogLevel = (uint32_t)val;
13609 }
13610
13611 /* Highest log level among hal.logs and global.logs is selected */
13612 if (gCamHal3LogLevel < globalLogLevel)
13613 gCamHal3LogLevel = globalLogLevel;
13614
13615 return;
13616}
13617
13618/*===========================================================================
13619 * FUNCTION : validateStreamRotations
13620 *
13621 * DESCRIPTION: Check if the rotations requested are supported
13622 *
13623 * PARAMETERS :
13624 * @stream_list : streams to be configured
13625 *
13626 * RETURN : NO_ERROR on success
13627 * -EINVAL on failure
13628 *
13629 *==========================================================================*/
13630int QCamera3HardwareInterface::validateStreamRotations(
13631 camera3_stream_configuration_t *streamList)
13632{
13633 int rc = NO_ERROR;
13634
13635 /*
13636 * Loop through all streams requested in configuration
13637 * Check if unsupported rotations have been requested on any of them
13638 */
13639 for (size_t j = 0; j < streamList->num_streams; j++){
13640 camera3_stream_t *newStream = streamList->streams[j];
13641
13642 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13643 bool isImplDef = (newStream->format ==
13644 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13645 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13646 isImplDef);
13647
13648 if (isRotated && (!isImplDef || isZsl)) {
13649 LOGE("Error: Unsupported rotation of %d requested for stream"
13650                     " type:%d and stream format:%d",
13651 newStream->rotation, newStream->stream_type,
13652 newStream->format);
13653 rc = -EINVAL;
13654 break;
13655 }
13656 }
13657
13658 return rc;
13659}
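/*
 * Illustrative sketch (assumption): what the check above accepts and rejects.
 * A rotated IMPLEMENTATION_DEFINED output stream is allowed, while a rotated
 * BLOB (JPEG) stream is not.
 *
 *     camera3_stream_t preview = {};
 *     preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *     preview.format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *     preview.rotation = CAMERA3_STREAM_ROTATION_90;   // passes
 *
 *     camera3_stream_t jpeg = {};
 *     jpeg.stream_type = CAMERA3_STREAM_OUTPUT;
 *     jpeg.format = HAL_PIXEL_FORMAT_BLOB;
 *     jpeg.rotation = CAMERA3_STREAM_ROTATION_90;      // rejected with -EINVAL
 */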
13660
13661/*===========================================================================
13662* FUNCTION : getFlashInfo
13663*
13664* DESCRIPTION: Retrieve information about whether the device has a flash.
13665*
13666* PARAMETERS :
13667* @cameraId : Camera id to query
13668* @hasFlash : Boolean indicating whether there is a flash device
13669* associated with given camera
13670* @flashNode : If a flash device exists, this will be its device node.
13671*
13672* RETURN :
13673* None
13674*==========================================================================*/
13675void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13676 bool& hasFlash,
13677 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13678{
13679 cam_capability_t* camCapability = gCamCapability[cameraId];
13680 if (NULL == camCapability) {
13681 hasFlash = false;
13682 flashNode[0] = '\0';
13683 } else {
13684 hasFlash = camCapability->flash_available;
13685 strlcpy(flashNode,
13686 (char*)camCapability->flash_dev_name,
13687 QCAMERA_MAX_FILEPATH_LENGTH);
13688 }
13689}
13690
13691/*===========================================================================
13692* FUNCTION : getEepromVersionInfo
13693*
13694* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13695*
13696* PARAMETERS : None
13697*
13698* RETURN : string describing EEPROM version
13699* "\0" if no such info available
13700*==========================================================================*/
13701const char *QCamera3HardwareInterface::getEepromVersionInfo()
13702{
13703 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13704}
13705
13706/*===========================================================================
13707* FUNCTION : getLdafCalib
13708*
13709* DESCRIPTION: Retrieve Laser AF calibration data
13710*
13711* PARAMETERS : None
13712*
13713* RETURN : Two uint32_t describing laser AF calibration data
13714* NULL if none is available.
13715*==========================================================================*/
13716const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13717{
13718 if (mLdafCalibExist) {
13719 return &mLdafCalib[0];
13720 } else {
13721 return NULL;
13722 }
13723}
13724
13725/*===========================================================================
13726 * FUNCTION : dynamicUpdateMetaStreamInfo
13727 *
13728 * DESCRIPTION: This function:
13729 * (1) stops all the channels
13730 * (2) returns error on pending requests and buffers
13731 * (3) sends metastream_info in setparams
13732 * (4) starts all channels
13733 * This is useful when sensor has to be restarted to apply any
13734 * settings such as frame rate from a different sensor mode
13735 *
13736 * PARAMETERS : None
13737 *
13738 * RETURN : NO_ERROR on success
13739 * Error codes on failure
13740 *
13741 *==========================================================================*/
13742int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13743{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013744 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013745 int rc = NO_ERROR;
13746
13747 LOGD("E");
13748
13749 rc = stopAllChannels();
13750 if (rc < 0) {
13751 LOGE("stopAllChannels failed");
13752 return rc;
13753 }
13754
13755 rc = notifyErrorForPendingRequests();
13756 if (rc < 0) {
13757 LOGE("notifyErrorForPendingRequests failed");
13758 return rc;
13759 }
13760
13761 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13762         LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx"
13763                 " Format:%d",
13764 mStreamConfigInfo.type[i],
13765 mStreamConfigInfo.stream_sizes[i].width,
13766 mStreamConfigInfo.stream_sizes[i].height,
13767 mStreamConfigInfo.postprocess_mask[i],
13768 mStreamConfigInfo.format[i]);
13769 }
13770
13771 /* Send meta stream info once again so that ISP can start */
13772 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13773 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13774 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13775 mParameters);
13776 if (rc < 0) {
13777 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13778 }
13779
13780 rc = startAllChannels();
13781 if (rc < 0) {
13782 LOGE("startAllChannels failed");
13783 return rc;
13784 }
13785
13786 LOGD("X");
13787 return rc;
13788}
13789
13790/*===========================================================================
13791 * FUNCTION : stopAllChannels
13792 *
13793 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13794 *
13795 * PARAMETERS : None
13796 *
13797 * RETURN : NO_ERROR on success
13798 * Error codes on failure
13799 *
13800 *==========================================================================*/
13801int32_t QCamera3HardwareInterface::stopAllChannels()
13802{
13803 int32_t rc = NO_ERROR;
13804
13805 LOGD("Stopping all channels");
13806 // Stop the Streams/Channels
13807 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13808 it != mStreamInfo.end(); it++) {
13809 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13810 if (channel) {
13811 channel->stop();
13812 }
13813 (*it)->status = INVALID;
13814 }
13815
13816 if (mSupportChannel) {
13817 mSupportChannel->stop();
13818 }
13819 if (mAnalysisChannel) {
13820 mAnalysisChannel->stop();
13821 }
13822 if (mRawDumpChannel) {
13823 mRawDumpChannel->stop();
13824 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013825 if (mHdrPlusRawSrcChannel) {
13826 mHdrPlusRawSrcChannel->stop();
13827 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013828 if (mMetadataChannel) {
13829 /* If content of mStreamInfo is not 0, there is metadata stream */
13830 mMetadataChannel->stop();
13831 }
13832
13833 LOGD("All channels stopped");
13834 return rc;
13835}
13836
13837/*===========================================================================
13838 * FUNCTION : startAllChannels
13839 *
13840 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13841 *
13842 * PARAMETERS : None
13843 *
13844 * RETURN : NO_ERROR on success
13845 * Error codes on failure
13846 *
13847 *==========================================================================*/
13848int32_t QCamera3HardwareInterface::startAllChannels()
13849{
13850 int32_t rc = NO_ERROR;
13851
13852 LOGD("Start all channels ");
13853 // Start the Streams/Channels
13854 if (mMetadataChannel) {
13855 /* If content of mStreamInfo is not 0, there is metadata stream */
13856 rc = mMetadataChannel->start();
13857 if (rc < 0) {
13858 LOGE("META channel start failed");
13859 return rc;
13860 }
13861 }
13862 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13863 it != mStreamInfo.end(); it++) {
13864 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13865 if (channel) {
13866 rc = channel->start();
13867 if (rc < 0) {
13868 LOGE("channel start failed");
13869 return rc;
13870 }
13871 }
13872 }
13873 if (mAnalysisChannel) {
13874 mAnalysisChannel->start();
13875 }
13876 if (mSupportChannel) {
13877 rc = mSupportChannel->start();
13878 if (rc < 0) {
13879 LOGE("Support channel start failed");
13880 return rc;
13881 }
13882 }
13883 if (mRawDumpChannel) {
13884 rc = mRawDumpChannel->start();
13885 if (rc < 0) {
13886 LOGE("RAW dump channel start failed");
13887 return rc;
13888 }
13889 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013890 if (mHdrPlusRawSrcChannel) {
13891 rc = mHdrPlusRawSrcChannel->start();
13892 if (rc < 0) {
13893 LOGE("HDR+ RAW channel start failed");
13894 return rc;
13895 }
13896 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013897
13898 LOGD("All channels started");
13899 return rc;
13900}
13901
13902/*===========================================================================
13903 * FUNCTION : notifyErrorForPendingRequests
13904 *
13905 * DESCRIPTION: This function sends error for all the pending requests/buffers
13906 *
13907 * PARAMETERS : None
13908 *
13909 * RETURN : Error codes
13910 * NO_ERROR on success
13911 *
13912 *==========================================================================*/
13913int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13914{
13915 int32_t rc = NO_ERROR;
13916 unsigned int frameNum = 0;
13917 camera3_capture_result_t result;
13918 camera3_stream_buffer_t *pStream_Buf = NULL;
13919
13920 memset(&result, 0, sizeof(camera3_capture_result_t));
13921
13922 if (mPendingRequestsList.size() > 0) {
13923 pendingRequestIterator i = mPendingRequestsList.begin();
13924 frameNum = i->frame_number;
13925 } else {
13926 /* There might still be pending buffers even though there are
13927 no pending requests. Setting the frameNum to MAX so that
13928 all the buffers with smaller frame numbers are returned */
13929 frameNum = UINT_MAX;
13930 }
13931
13932 LOGH("Oldest frame num on mPendingRequestsList = %u",
13933 frameNum);
13934
Emilian Peev7650c122017-01-19 08:24:33 -080013935 notifyErrorFoPendingDepthData(mDepthChannel);
13936
Thierry Strudel3d639192016-09-09 11:52:26 -070013937 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13938 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13939
13940 if (req->frame_number < frameNum) {
13941 // Send Error notify to frameworks for each buffer for which
13942 // metadata buffer is already sent
13943 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13944 req->frame_number, req->mPendingBufferList.size());
13945
13946 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13947 if (NULL == pStream_Buf) {
13948 LOGE("No memory for pending buffers array");
13949 return NO_MEMORY;
13950 }
13951 memset(pStream_Buf, 0,
13952 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13953 result.result = NULL;
13954 result.frame_number = req->frame_number;
13955 result.num_output_buffers = req->mPendingBufferList.size();
13956 result.output_buffers = pStream_Buf;
13957
13958 size_t index = 0;
13959 for (auto info = req->mPendingBufferList.begin();
13960 info != req->mPendingBufferList.end(); ) {
13961
13962 camera3_notify_msg_t notify_msg;
13963 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13964 notify_msg.type = CAMERA3_MSG_ERROR;
13965 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13966 notify_msg.message.error.error_stream = info->stream;
13967 notify_msg.message.error.frame_number = req->frame_number;
13968 pStream_Buf[index].acquire_fence = -1;
13969 pStream_Buf[index].release_fence = -1;
13970 pStream_Buf[index].buffer = info->buffer;
13971 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13972 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013973 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013974 index++;
13975 // Remove buffer from list
13976 info = req->mPendingBufferList.erase(info);
13977 }
13978
13979 // Remove this request from Map
13980 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13981 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13982 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13983
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013984 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013985
13986 delete [] pStream_Buf;
13987 } else {
13988
13989 // Go through the pending requests info and send error request to framework
13990 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13991
13992 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13993
13994 // Send error notify to frameworks
13995 camera3_notify_msg_t notify_msg;
13996 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13997 notify_msg.type = CAMERA3_MSG_ERROR;
13998 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13999 notify_msg.message.error.error_stream = NULL;
14000 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014001 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014002
14003 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
14004 if (NULL == pStream_Buf) {
14005 LOGE("No memory for pending buffers array");
14006 return NO_MEMORY;
14007 }
14008 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
14009
14010 result.result = NULL;
14011 result.frame_number = req->frame_number;
14012 result.input_buffer = i->input_buffer;
14013 result.num_output_buffers = req->mPendingBufferList.size();
14014 result.output_buffers = pStream_Buf;
14015
14016 size_t index = 0;
14017 for (auto info = req->mPendingBufferList.begin();
14018 info != req->mPendingBufferList.end(); ) {
14019 pStream_Buf[index].acquire_fence = -1;
14020 pStream_Buf[index].release_fence = -1;
14021 pStream_Buf[index].buffer = info->buffer;
14022 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
14023 pStream_Buf[index].stream = info->stream;
14024 index++;
14025 // Remove buffer from list
14026 info = req->mPendingBufferList.erase(info);
14027 }
14028
14029 // Remove this request from Map
14030 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
14031 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
14032 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14033
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014034 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014035 delete [] pStream_Buf;
14036 i = erasePendingRequest(i);
14037 }
14038 }
14039
14040 /* Reset pending frame Drop list and requests list */
14041 mPendingFrameDropList.clear();
14042
14043 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14044 req.mPendingBufferList.clear();
14045 }
14046 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014047 LOGH("Cleared all the pending buffers ");
14048
14049 return rc;
14050}
14051
14052bool QCamera3HardwareInterface::isOnEncoder(
14053 const cam_dimension_t max_viewfinder_size,
14054 uint32_t width, uint32_t height)
14055{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014056 return ((width > (uint32_t)max_viewfinder_size.width) ||
14057 (height > (uint32_t)max_viewfinder_size.height) ||
14058 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14059 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014060}
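/*
 * Illustrative sketch: with a 1920x1080 maximum viewfinder size, a full-size
 * snapshot stream is routed to the encoder path while a small preview stream
 * is not. The numbers are examples only.
 *
 *     cam_dimension_t maxVf;
 *     maxVf.width = 1920;
 *     maxVf.height = 1080;
 *     bool snapshotOnEncoder = isOnEncoder(maxVf, 4000, 3000);  // true
 *     bool previewOnEncoder  = isOnEncoder(maxVf, 1280, 720);   // false
 */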
14061
14062/*===========================================================================
14063 * FUNCTION : setBundleInfo
14064 *
14065 * DESCRIPTION: Set bundle info for all streams that are bundle.
14066 *
14067 * PARAMETERS : None
14068 *
14069 * RETURN : NO_ERROR on success
14070 * Error codes on failure
14071 *==========================================================================*/
14072int32_t QCamera3HardwareInterface::setBundleInfo()
14073{
14074 int32_t rc = NO_ERROR;
14075
14076 if (mChannelHandle) {
14077 cam_bundle_config_t bundleInfo;
14078 memset(&bundleInfo, 0, sizeof(bundleInfo));
14079 rc = mCameraHandle->ops->get_bundle_info(
14080 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14081 if (rc != NO_ERROR) {
14082 LOGE("get_bundle_info failed");
14083 return rc;
14084 }
14085 if (mAnalysisChannel) {
14086 mAnalysisChannel->setBundleInfo(bundleInfo);
14087 }
14088 if (mSupportChannel) {
14089 mSupportChannel->setBundleInfo(bundleInfo);
14090 }
14091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14092 it != mStreamInfo.end(); it++) {
14093 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14094 channel->setBundleInfo(bundleInfo);
14095 }
14096 if (mRawDumpChannel) {
14097 mRawDumpChannel->setBundleInfo(bundleInfo);
14098 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014099 if (mHdrPlusRawSrcChannel) {
14100 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14101 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014102 }
14103
14104 return rc;
14105}
14106
14107/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014108 * FUNCTION : setInstantAEC
14109 *
14110 * DESCRIPTION: Set Instant AEC related params.
14111 *
14112 * PARAMETERS :
14113 * @meta: CameraMetadata reference
14114 *
14115 * RETURN : NO_ERROR on success
14116 * Error codes on failure
14117 *==========================================================================*/
14118int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14119{
14120 int32_t rc = NO_ERROR;
14121 uint8_t val = 0;
14122 char prop[PROPERTY_VALUE_MAX];
14123
14124 // First try to configure instant AEC from framework metadata
14125 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14126 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14127 }
14128
14129 // If framework did not set this value, try to read from set prop.
14130 if (val == 0) {
14131 memset(prop, 0, sizeof(prop));
14132 property_get("persist.camera.instant.aec", prop, "0");
14133 val = (uint8_t)atoi(prop);
14134 }
14135
14136 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14137 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14138 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14139 mInstantAEC = val;
14140 mInstantAECSettledFrameNumber = 0;
14141 mInstantAecFrameIdxCount = 0;
14142 LOGH("instantAEC value set %d",val);
14143 if (mInstantAEC) {
14144 memset(prop, 0, sizeof(prop));
14145 property_get("persist.camera.ae.instant.bound", prop, "10");
14146 int32_t aec_frame_skip_cnt = atoi(prop);
14147 if (aec_frame_skip_cnt >= 0) {
14148 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14149 } else {
14150 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14151 rc = BAD_VALUE;
14152 }
14153 }
14154 } else {
14155 LOGE("Bad instant aec value set %d", val);
14156 rc = BAD_VALUE;
14157 }
14158 return rc;
14159}
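/*
 * Illustrative sketch (assumption): an application requesting faster AEC
 * convergence through the vendor tag consumed above. The value 1 is only an
 * example of a mode between CAM_AEC_NORMAL_CONVERGENCE and
 * CAM_AEC_CONVERGENCE_MAX.
 *
 *     CameraMetadata settings;
 *     int32_t instantAecMode = 1;
 *     settings.update(QCAMERA3_INSTANT_AEC_MODE, &instantAecMode, 1);
 *     // setInstantAEC(settings) then programs CAM_INTF_PARM_INSTANT_AEC and
 *     // arms the persist.camera.ae.instant.bound frame-skip logic.
 */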
14160
14161/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014162 * FUNCTION : get_num_overall_buffers
14163 *
14164 * DESCRIPTION: Compute the total number of pending buffers across all requests.
14165 *
14166 * PARAMETERS : None
14167 *
14168 * RETURN : Number of overall pending buffers
14169 *
14170 *==========================================================================*/
14171uint32_t PendingBuffersMap::get_num_overall_buffers()
14172{
14173 uint32_t sum_buffers = 0;
14174 for (auto &req : mPendingBuffersInRequest) {
14175 sum_buffers += req.mPendingBufferList.size();
14176 }
14177 return sum_buffers;
14178}
14179
14180/*===========================================================================
14181 * FUNCTION : removeBuf
14182 *
14183 * DESCRIPTION: Remove a matching buffer from tracker.
14184 *
14185 * PARAMETERS : @buffer: buffer handle to remove from the tracker
14186 *
14187 * RETURN : None
14188 *
14189 *==========================================================================*/
14190void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14191{
14192 bool buffer_found = false;
14193 for (auto req = mPendingBuffersInRequest.begin();
14194 req != mPendingBuffersInRequest.end(); req++) {
14195 for (auto k = req->mPendingBufferList.begin();
14196 k != req->mPendingBufferList.end(); k++ ) {
14197 if (k->buffer == buffer) {
14198 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14199 req->frame_number, buffer);
14200 k = req->mPendingBufferList.erase(k);
14201 if (req->mPendingBufferList.empty()) {
14202 // Remove this request from Map
14203 req = mPendingBuffersInRequest.erase(req);
14204 }
14205 buffer_found = true;
14206 break;
14207 }
14208 }
14209 if (buffer_found) {
14210 break;
14211 }
14212 }
14213 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14214 get_num_overall_buffers());
14215}
14216
14217/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014218 * FUNCTION : getBufErrStatus
14219 *
14220 * DESCRIPTION: get buffer error status
14221 *
14222 * PARAMETERS : @buffer: buffer handle
14223 *
14224 * RETURN : Error status
14225 *
14226 *==========================================================================*/
14227int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14228{
14229 for (auto& req : mPendingBuffersInRequest) {
14230 for (auto& k : req.mPendingBufferList) {
14231 if (k.buffer == buffer)
14232 return k.bufStatus;
14233 }
14234 }
14235 return CAMERA3_BUFFER_STATUS_OK;
14236}
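/*
 * Illustrative sketch (not part of the build): typical bookkeeping around the
 * PendingBuffersMap helpers above when returning a completed buffer, where
 * `buffer` is a camera3_stream_buffer_t being handed back to the framework.
 *
 *     if (mPendingBuffersMap.getBufErrStatus(buffer.buffer) ==
 *             CAMERA3_BUFFER_STATUS_ERROR) {
 *         buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
 *     }
 *     mPendingBuffersMap.removeBuf(buffer.buffer);
 *     LOGD("Pending buffers remaining: %d",
 *             mPendingBuffersMap.get_num_overall_buffers());
 */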
14237
14238/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014239 * FUNCTION : setPAAFSupport
14240 *
14241 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14242 * feature mask according to stream type and filter
14243 * arrangement
14244 *
14245 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14246 * @stream_type: stream type
14247 * @filter_arrangement: filter arrangement
14248 *
14249 * RETURN : None
14250 *==========================================================================*/
14251void QCamera3HardwareInterface::setPAAFSupport(
14252 cam_feature_mask_t& feature_mask,
14253 cam_stream_type_t stream_type,
14254 cam_color_filter_arrangement_t filter_arrangement)
14255{
Thierry Strudel3d639192016-09-09 11:52:26 -070014256 switch (filter_arrangement) {
14257 case CAM_FILTER_ARRANGEMENT_RGGB:
14258 case CAM_FILTER_ARRANGEMENT_GRBG:
14259 case CAM_FILTER_ARRANGEMENT_GBRG:
14260 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014261 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14262 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014263 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014264 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14265 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014266 }
14267 break;
14268 case CAM_FILTER_ARRANGEMENT_Y:
14269 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14270 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14271 }
14272 break;
14273 default:
14274 break;
14275 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014276 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14277 feature_mask, stream_type, filter_arrangement);
14278
14279
Thierry Strudel3d639192016-09-09 11:52:26 -070014280}
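/*
 * Illustrative sketch (assumption): adjusting a stream's feature mask before
 * stream configuration. The color_arrangement field of cam_capability_t is
 * assumed here for illustration.
 *
 *     cam_feature_mask_t mask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
 *     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *             gCamCapability[mCameraId]->color_arrangement);
 *     // mask now carries CAM_QCOM_FEATURE_PAAF for Bayer sensors (unless
 *     // CAM_QTI_FEATURE_PPEISCORE is already set).
 */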
14281
14282/*===========================================================================
14283* FUNCTION : getSensorMountAngle
14284*
14285* DESCRIPTION: Retrieve sensor mount angle
14286*
14287* PARAMETERS : None
14288*
14289* RETURN : sensor mount angle in uint32_t
14290*==========================================================================*/
14291uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14292{
14293 return gCamCapability[mCameraId]->sensor_mount_angle;
14294}
14295
14296/*===========================================================================
14297* FUNCTION : getRelatedCalibrationData
14298*
14299* DESCRIPTION: Retrieve related system calibration data
14300*
14301* PARAMETERS : None
14302*
14303* RETURN : Pointer of related system calibration data
14304*==========================================================================*/
14305const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14306{
14307 return (const cam_related_system_calibration_data_t *)
14308 &(gCamCapability[mCameraId]->related_cam_calibration);
14309}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014310
14311/*===========================================================================
14312 * FUNCTION : is60HzZone
14313 *
14314 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14315 *
14316 * PARAMETERS : None
14317 *
14318 * RETURN : True if in 60Hz zone, False otherwise
14319 *==========================================================================*/
14320bool QCamera3HardwareInterface::is60HzZone()
14321{
14322 time_t t = time(NULL);
14323 struct tm lt;
14324
14325 struct tm* r = localtime_r(&t, &lt);
14326
14327 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14328 return true;
14329 else
14330 return false;
14331}
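/*
 * Worked example of the UTC-offset heuristic above: New York (UTC-05:00)
 * gives tm_gmtoff = -18000 <= -7200, so is60HzZone() returns true (60 Hz);
 * Berlin (UTC+01:00) gives tm_gmtoff = 3600, inside (-7200, 28800), so it
 * returns false (50 Hz).
 */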
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014332
14333/*===========================================================================
14334 * FUNCTION : adjustBlackLevelForCFA
14335 *
14336 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14337 * of bayer CFA (Color Filter Array).
14338 *
14339 * PARAMETERS : @input: black level pattern in the order of RGGB
14340 * @output: black level pattern in the order of CFA
14341 * @color_arrangement: CFA color arrangement
14342 *
14343 * RETURN : None
14344 *==========================================================================*/
14345template<typename T>
14346void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14347 T input[BLACK_LEVEL_PATTERN_CNT],
14348 T output[BLACK_LEVEL_PATTERN_CNT],
14349 cam_color_filter_arrangement_t color_arrangement)
14350{
14351 switch (color_arrangement) {
14352 case CAM_FILTER_ARRANGEMENT_GRBG:
14353 output[0] = input[1];
14354 output[1] = input[0];
14355 output[2] = input[3];
14356 output[3] = input[2];
14357 break;
14358 case CAM_FILTER_ARRANGEMENT_GBRG:
14359 output[0] = input[2];
14360 output[1] = input[3];
14361 output[2] = input[0];
14362 output[3] = input[1];
14363 break;
14364 case CAM_FILTER_ARRANGEMENT_BGGR:
14365 output[0] = input[3];
14366 output[1] = input[2];
14367 output[2] = input[1];
14368 output[3] = input[0];
14369 break;
14370 case CAM_FILTER_ARRANGEMENT_RGGB:
14371 output[0] = input[0];
14372 output[1] = input[1];
14373 output[2] = input[2];
14374 output[3] = input[3];
14375 break;
14376 default:
14377 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14378 break;
14379 }
14380}
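/*
 * Illustrative sketch: remapping a black-level pattern given in R, Gr, Gb, B
 * order onto a GRBG sensor. The numeric values are examples only.
 *
 *     float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.5f, 64.5f, 65.0f};
 *     float cfa[BLACK_LEVEL_PATTERN_CNT];
 *     adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *     // cfa = {64.5f, 64.0f, 65.0f, 64.5f} -- Gr, R, B, Gb for GRBG.
 */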
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014381
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014382void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14383 CameraMetadata &resultMetadata,
14384 std::shared_ptr<metadata_buffer_t> settings)
14385{
14386 if (settings == nullptr) {
14387 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14388 return;
14389 }
14390
14391 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14392 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14393 }
14394
14395 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14396 String8 str((const char *)gps_methods);
14397 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14398 }
14399
14400 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14401 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14402 }
14403
14404 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14405 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14406 }
14407
14408 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14409 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14410 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14411 }
14412
14413 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14414 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14415 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14416 }
14417
14418 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14419 int32_t fwk_thumb_size[2];
14420 fwk_thumb_size[0] = thumb_size->width;
14421 fwk_thumb_size[1] = thumb_size->height;
14422 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14423 }
14424
14425 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14426 uint8_t fwk_intent = intent[0];
14427 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14428 }
14429}
14430
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014431bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14432 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14433 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014434{
14435 if (hdrPlusRequest == nullptr) return false;
14436
14437 // Check noise reduction mode is high quality.
14438 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14439 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14440 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014441 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14442 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014443 return false;
14444 }
14445
14446 // Check edge mode is high quality.
14447 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14448 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14449 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14450 return false;
14451 }
14452
14453 if (request.num_output_buffers != 1 ||
14454 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14455 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014456 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14457 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14458                     request.output_buffers[i].stream->width,
14459                     request.output_buffers[i].stream->height,
14460                     request.output_buffers[i].stream->format);
14461 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014462 return false;
14463 }
14464
14465 // Get a YUV buffer from pic channel.
14466 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14467 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14468 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14469 if (res != OK) {
14470 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14471 __FUNCTION__, strerror(-res), res);
14472 return false;
14473 }
14474
14475 pbcamera::StreamBuffer buffer;
14476 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014477 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014478 buffer.data = yuvBuffer->buffer;
14479 buffer.dataSize = yuvBuffer->frame_len;
14480
14481 pbcamera::CaptureRequest pbRequest;
14482 pbRequest.id = request.frame_number;
14483 pbRequest.outputBuffers.push_back(buffer);
14484
14485 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014486 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014487 if (res != OK) {
14488 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14489 strerror(-res), res);
14490 return false;
14491 }
14492
14493 hdrPlusRequest->yuvBuffer = yuvBuffer;
14494 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14495
14496 return true;
14497}
14498
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014499status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14500{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014501 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14502 return OK;
14503 }
14504
14505 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14506 if (res != OK) {
14507 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14508 strerror(-res), res);
14509 return res;
14510 }
14511 gHdrPlusClientOpening = true;
14512
14513 return OK;
14514}
14515
Chien-Yu Chenee335912017-02-09 17:53:20 -080014516status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14517{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014518 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014519
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014520 // Check if gHdrPlusClient is opened or being opened.
14521 if (gHdrPlusClient == nullptr) {
14522 if (gHdrPlusClientOpening) {
14523 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14524 return OK;
14525 }
14526
14527 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014528 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014529 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14530 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014531 return res;
14532 }
14533
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014534 // When opening HDR+ client completes, HDR+ mode will be enabled.
14535 return OK;
14536
Chien-Yu Chenee335912017-02-09 17:53:20 -080014537 }
14538
14539 // Configure stream for HDR+.
14540 res = configureHdrPlusStreamsLocked();
14541 if (res != OK) {
14542 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014543 return res;
14544 }
14545
14546 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14547 res = gHdrPlusClient->setZslHdrPlusMode(true);
14548 if (res != OK) {
14549 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014550 return res;
14551 }
14552
14553 mHdrPlusModeEnabled = true;
14554 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14555
14556 return OK;
14557}
14558
14559void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14560{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014561 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014562 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014563 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14564 if (res != OK) {
14565 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14566 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014567
14568 // Close HDR+ client so Easel can enter low power mode.
14569 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14570 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014571 }
14572
14573 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014574 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014575 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14576}
14577
14578status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014579{
14580 pbcamera::InputConfiguration inputConfig;
14581 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14582 status_t res = OK;
14583
14584 // Configure HDR+ client streams.
14585 // Get input config.
14586 if (mHdrPlusRawSrcChannel) {
14587 // HDR+ input buffers will be provided by HAL.
14588 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14589 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14590 if (res != OK) {
14591 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14592 __FUNCTION__, strerror(-res), res);
14593 return res;
14594 }
14595
14596 inputConfig.isSensorInput = false;
14597 } else {
14598 // Sensor MIPI will send data to Easel.
14599 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014600 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014601 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14602 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14603 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14604 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14605 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14606 if (mSensorModeInfo.num_raw_bits != 10) {
14607 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14608 mSensorModeInfo.num_raw_bits);
14609 return BAD_VALUE;
14610 }
14611
14612 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014613 }
14614
    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV output buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                __FUNCTION__, strerror(-res), res);
            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
            strerror(-res), res);
        return res;
    }

    return OK;
}

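// Callback from the HDR+ client open flow: invoked once an asynchronously requested
// client has finished opening. Takes ownership of the client, pushes the camera's
// static metadata to it, and enables HDR+ mode. If HDR+ was turned off while the open
// was in flight (gHdrPlusClientOpening already cleared), the new client is discarded.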
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
            __FUNCTION__, strerror(-res), res);
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}

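// Callback invoked when an asynchronous HDR+ client open attempt fails; logs the
// error and clears gHdrPlusClientOpening under gHdrPlusClientLock.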
void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

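// Callback invoked when the HDR+ client reports an unrecoverable error; puts the HAL
// into the ERROR state and notifies the framework through handleCameraDeviceError().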
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError();
}

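// Callback invoked when the HDR+ client delivers a capture result. The single YUV
// output buffer is matched to its pending HDR+ request, the result metadata is
// rewritten to carry the original still-capture settings, the buffer is handed to the
// picture channel for JPEG encoding, and the metadata is reported to the framework.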
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending request for HDR+ result %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

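        // The YUV dump below is a debugging aid that can be toggled at runtime, e.g.
        // (assuming a debuggable build with adb root access):
        //   adb shell setprop persist.camera.hdrplus.dump_yuv 1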
        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

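        // Build a fresh HAL metadata buffer from the updated framework metadata so the
        // picture channel can encode the JPEG with settings consistent with this result.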
        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translating framework metadata to HAL metadata failed: %s (%d).",
                    __FUNCTION__, strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            mHdrPlusPendingRequests.erase(result->requestId);
        }
    }
}

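// Callback invoked when the HDR+ client fails to produce a capture result. The YUV
// buffer is returned to the picture channel, and every pending output buffer of the
// affected frame is reported back to the framework as a buffer error so the request
// does not stall.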
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got a null failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request and return its YUV buffer to the pic channel.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest != mHdrPlusPendingRequests.end()) {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel = (QCamera3PicChannel*)
                    pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        }
    }

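    // Report every remaining output buffer of the failed frame back to the framework
    // with CAMERA3_BUFFER_STATUS_ERROR so it does not wait on buffers that will never
    // be filled, then drop the corresponding pending request.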
    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

}; //end namespace qcamera