Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070061#include "EaselManagerClient.h"
Chien-Yu Chene687bd02016-12-07 18:30:26 -080062
Thierry Strudel3d639192016-09-09 11:52:26 -070063extern "C" {
64#include "mm_camera_dbg.h"
65}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080066#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070067
68using namespace android;
69
70namespace qcamera {
71
72#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
73
74#define EMPTY_PIPELINE_DELAY 2
75#define PARTIAL_RESULT_COUNT 2
76#define FRAME_SKIP_DELAY 0
77
78#define MAX_VALUE_8BIT ((1<<8)-1)
79#define MAX_VALUE_10BIT ((1<<10)-1)
80#define MAX_VALUE_12BIT ((1<<12)-1)
81
82#define VIDEO_4K_WIDTH 3840
83#define VIDEO_4K_HEIGHT 2160
84
Jason Leeb9e76432017-03-10 17:14:19 -080085#define MAX_EIS_WIDTH 3840
86#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070087
88#define MAX_RAW_STREAMS 1
89#define MAX_STALLING_STREAMS 1
90#define MAX_PROCESSED_STREAMS 3
91/* Batch mode is enabled only if FPS set is equal to or greater than this */
92#define MIN_FPS_FOR_BATCH_MODE (120)
93#define PREVIEW_FPS_FOR_HFR (30)
94#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080095#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070096#define MAX_HFR_BATCH_SIZE (8)
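// Illustrative note (assumption, not stated in this file): in HFR recording the
// batch size is expected to be the capture FPS divided by PREVIEW_FPS_FOR_HFR,
// capped at MAX_HFR_BATCH_SIZE, e.g. 240 fps capture / 30 fps preview = 8 buffers
// queued per batch.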
97#define REGIONS_TUPLE_COUNT 5
98#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070099// Threshold (in seconds) for detecting missing request buffers
100#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800101#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700102#define FLUSH_TIMEOUT 3
103#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
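// METADATA_MAP_SIZE yields the number of entries in a statically sized map table at
// compile time, e.g. METADATA_MAP_SIZE(EFFECT_MODES_MAP) for the effect-mode table below.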
104
105#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
106 CAM_QCOM_FEATURE_CROP |\
107 CAM_QCOM_FEATURE_ROTATION |\
108 CAM_QCOM_FEATURE_SHARPNESS |\
109 CAM_QCOM_FEATURE_SCALE |\
110 CAM_QCOM_FEATURE_CAC |\
111 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700112/* Per configuration size for static metadata length*/
113#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700114
115#define TIMEOUT_NEVER -1
116
Thierry Strudel04e026f2016-10-10 11:27:36 -0700117/* Face landmarks indices */
118#define LEFT_EYE_X 0
119#define LEFT_EYE_Y 1
120#define RIGHT_EYE_X 2
121#define RIGHT_EYE_Y 3
122#define MOUTH_X 4
123#define MOUTH_Y 5
124#define TOTAL_LANDMARK_INDICES 6
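// These indices address the flattened per-face landmark array of (x, y) pairs:
// left eye, right eye and mouth, giving TOTAL_LANDMARK_INDICES values per face.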
125
Zhijun He2a5df222017-04-04 18:20:38 -0700126// Max preferred zoom
127#define MAX_PREFERRED_ZOOM_RATIO 5.0
128
Thierry Strudel3d639192016-09-09 11:52:26 -0700129cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
130const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
131extern pthread_mutex_t gCamLock;
132volatile uint32_t gCamHal3LogLevel = 1;
133extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700134
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800135// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700136// The following Easel related variables must be protected by gHdrPlusClientLock.
137EaselManagerClient gEaselManagerClient;
138bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
139std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
140bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
141
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800142// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
143bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700144
145Mutex gHdrPlusClientLock; // Protect above Easel related variables.
146
Thierry Strudel3d639192016-09-09 11:52:26 -0700147
148const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
149 {"On", CAM_CDS_MODE_ON},
150 {"Off", CAM_CDS_MODE_OFF},
151 {"Auto",CAM_CDS_MODE_AUTO}
152};
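// The QCameraMap tables below translate framework enum / vendor-tag values to the
// corresponding mm-camera values. A minimal sketch of the lookup pattern they serve
// (helper and member names here are illustrative assumptions, not the actual API):
//
//   template <typename FwkT, typename HalT, size_t N>
//   bool mapFwkToHal(const QCameraMap<FwkT, HalT> (&table)[N], FwkT fwk, HalT *hal) {
//       for (size_t i = 0; i < N; i++) {
//           if (table[i].fwk_name == fwk) {
//               *hal = table[i].hal_name;   // first match wins
//               return true;
//           }
//       }
//       return false;                       // no mapping found
//   }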
Thierry Strudel04e026f2016-10-10 11:27:36 -0700153const QCamera3HardwareInterface::QCameraMap<
154 camera_metadata_enum_android_video_hdr_mode_t,
155 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
156 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
157 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
158};
159
Thierry Strudel54dc9782017-02-15 12:12:10 -0800160const QCamera3HardwareInterface::QCameraMap<
161 camera_metadata_enum_android_binning_correction_mode_t,
162 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
163 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
164 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
165};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700166
167const QCamera3HardwareInterface::QCameraMap<
168 camera_metadata_enum_android_ir_mode_t,
169 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
170 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
171 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
172 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
173};
Thierry Strudel3d639192016-09-09 11:52:26 -0700174
175const QCamera3HardwareInterface::QCameraMap<
176 camera_metadata_enum_android_control_effect_mode_t,
177 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
178 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
179 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
180 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
181 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
182 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
183 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
184 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
185 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
186 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
187};
188
189const QCamera3HardwareInterface::QCameraMap<
190 camera_metadata_enum_android_control_awb_mode_t,
191 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
192 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
193 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
194 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
195 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
196 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
197 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
198 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
199 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
200 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
201};
202
203const QCamera3HardwareInterface::QCameraMap<
204 camera_metadata_enum_android_control_scene_mode_t,
205 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
206 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
207 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
208 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
209 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
210 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
211 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
212 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
213 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
214 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
215 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
216 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
217 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
218 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
219 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
220 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800221 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
222 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700223};
224
225const QCamera3HardwareInterface::QCameraMap<
226 camera_metadata_enum_android_control_af_mode_t,
227 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
228 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
229 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
230 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
231 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
232 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
233 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
234 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
235};
236
237const QCamera3HardwareInterface::QCameraMap<
238 camera_metadata_enum_android_color_correction_aberration_mode_t,
239 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
240 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
241 CAM_COLOR_CORRECTION_ABERRATION_OFF },
242 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
243 CAM_COLOR_CORRECTION_ABERRATION_FAST },
244 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
245 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
246};
247
248const QCamera3HardwareInterface::QCameraMap<
249 camera_metadata_enum_android_control_ae_antibanding_mode_t,
250 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
251 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
252 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
253 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
254 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
255};
256
257const QCamera3HardwareInterface::QCameraMap<
258 camera_metadata_enum_android_control_ae_mode_t,
259 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
260 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
261 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
262 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
263 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
264 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
265};
266
267const QCamera3HardwareInterface::QCameraMap<
268 camera_metadata_enum_android_flash_mode_t,
269 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
270 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
271 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
272 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
273};
274
275const QCamera3HardwareInterface::QCameraMap<
276 camera_metadata_enum_android_statistics_face_detect_mode_t,
277 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
278 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
279 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
280 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
281};
282
283const QCamera3HardwareInterface::QCameraMap<
284 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
285 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
286 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
287 CAM_FOCUS_UNCALIBRATED },
288 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
289 CAM_FOCUS_APPROXIMATE },
290 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
291 CAM_FOCUS_CALIBRATED }
292};
293
294const QCamera3HardwareInterface::QCameraMap<
295 camera_metadata_enum_android_lens_state_t,
296 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
297 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
298 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
299};
300
301const int32_t available_thumbnail_sizes[] = {0, 0,
302 176, 144,
303 240, 144,
304 256, 144,
305 240, 160,
306 256, 154,
307 240, 240,
308 320, 240};
309
310const QCamera3HardwareInterface::QCameraMap<
311 camera_metadata_enum_android_sensor_test_pattern_mode_t,
312 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
313 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
314 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
315 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
316 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
317 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
318 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
319};
320
 321/* Since there is no mapping for all the options, some Android enums are not listed.
 322 * Also, the order in this list is important: when mapping from HAL to Android, the table is
 323 * traversed from lower to higher index, so for HAL values that map to multiple Android
 324 * values the first match found is selected.
 325 */
326const QCamera3HardwareInterface::QCameraMap<
327 camera_metadata_enum_android_sensor_reference_illuminant1_t,
328 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
329 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
330 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
331 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
332 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
333 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
334 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
335 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
336 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
337 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
338 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
339 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
340 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
341 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
342 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
343 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
344 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
345};
346
347const QCamera3HardwareInterface::QCameraMap<
348 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
349 { 60, CAM_HFR_MODE_60FPS},
350 { 90, CAM_HFR_MODE_90FPS},
351 { 120, CAM_HFR_MODE_120FPS},
352 { 150, CAM_HFR_MODE_150FPS},
353 { 180, CAM_HFR_MODE_180FPS},
354 { 210, CAM_HFR_MODE_210FPS},
355 { 240, CAM_HFR_MODE_240FPS},
356 { 480, CAM_HFR_MODE_480FPS},
357};
358
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700359const QCamera3HardwareInterface::QCameraMap<
360 qcamera3_ext_instant_aec_mode_t,
361 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
362 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
363 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
364 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
365};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800366
367const QCamera3HardwareInterface::QCameraMap<
368 qcamera3_ext_exposure_meter_mode_t,
369 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
370 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
371 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
372 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
373 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
374 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
375 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
376 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
377};
378
379const QCamera3HardwareInterface::QCameraMap<
380 qcamera3_ext_iso_mode_t,
381 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
382 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
383 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
384 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
385 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
386 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
387 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
388 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
389 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
390};
391
Thierry Strudel3d639192016-09-09 11:52:26 -0700392camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
393 .initialize = QCamera3HardwareInterface::initialize,
394 .configure_streams = QCamera3HardwareInterface::configure_streams,
395 .register_stream_buffers = NULL,
396 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
397 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
398 .get_metadata_vendor_tag_ops = NULL,
399 .dump = QCamera3HardwareInterface::dump,
400 .flush = QCamera3HardwareInterface::flush,
401 .reserved = {0},
402};
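// These entry points reach the camera service through camera3_device_t::ops
// (the constructor sets mCameraDevice.ops = &mCameraOps). A rough sketch of the
// HAL3 call sequence from the framework side (not code from this file):
//
//   device->ops->initialize(device, callback_ops);
//   device->ops->configure_streams(device, &stream_list);
//   device->ops->process_capture_request(device, &request);  // repeated per frame
//   device->common.close(&device->common);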
403
404// initialise to some default value
405uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
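// 0xDEADBEEF is the "no active session" sentinel: openCamera() stores the real
// session id used for dual-camera linking and closeCamera() resets the slot.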
406
407/*===========================================================================
408 * FUNCTION : QCamera3HardwareInterface
409 *
410 * DESCRIPTION: constructor of QCamera3HardwareInterface
411 *
412 * PARAMETERS :
413 * @cameraId : camera ID
414 *
415 * RETURN : none
416 *==========================================================================*/
417QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
418 const camera_module_callbacks_t *callbacks)
419 : mCameraId(cameraId),
420 mCameraHandle(NULL),
421 mCameraInitialized(false),
422 mCallbackOps(NULL),
423 mMetadataChannel(NULL),
424 mPictureChannel(NULL),
425 mRawChannel(NULL),
426 mSupportChannel(NULL),
427 mAnalysisChannel(NULL),
428 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700429 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700430 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800431 mDepthChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800432 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700433 mChannelHandle(0),
434 mFirstConfiguration(true),
435 mFlush(false),
436 mFlushPerf(false),
437 mParamHeap(NULL),
438 mParameters(NULL),
439 mPrevParameters(NULL),
440 m_bIsVideo(false),
441 m_bIs4KVideo(false),
442 m_bEisSupportedSize(false),
443 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800444 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700445 m_MobicatMask(0),
446 mMinProcessedFrameDuration(0),
447 mMinJpegFrameDuration(0),
448 mMinRawFrameDuration(0),
449 mMetaFrameCount(0U),
450 mUpdateDebugLevel(false),
451 mCallbacks(callbacks),
452 mCaptureIntent(0),
453 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700454 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800455 /* DevCamDebug metadata internal m control*/
456 mDevCamDebugMetaEnable(0),
457 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700458 mBatchSize(0),
459 mToBeQueuedVidBufs(0),
460 mHFRVideoFps(DEFAULT_VIDEO_FPS),
461 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800462 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800463 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700464 mFirstFrameNumberInBatch(0),
465 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800466 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700467 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
468 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000469 mPDSupported(false),
470 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700471 mInstantAEC(false),
472 mResetInstantAEC(false),
473 mInstantAECSettledFrameNumber(0),
474 mAecSkipDisplayFrameBound(0),
475 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800476 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700477 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700478 mLastCustIntentFrmNum(-1),
479 mState(CLOSED),
480 mIsDeviceLinked(false),
481 mIsMainCamera(true),
482 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700483 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800484 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800485 mHdrPlusModeEnabled(false),
486 mIsApInputUsedForHdrPlus(false),
487 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800488 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700489{
490 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700491 mCommon.init(gCamCapability[cameraId]);
492 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700493#ifndef USE_HAL_3_3
494 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
495#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700496 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700497#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700498 mCameraDevice.common.close = close_camera_device;
499 mCameraDevice.ops = &mCameraOps;
500 mCameraDevice.priv = this;
501 gCamCapability[cameraId]->version = CAM_HAL_V3;
 502 // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
 503 //TBD - Verify whether this hardcoding is needed; check whether mctl fills this to 3
504 gCamCapability[cameraId]->min_num_pp_bufs = 3;
505
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800506 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700507
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800508 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700509 mPendingLiveRequest = 0;
510 mCurrentRequestId = -1;
511 pthread_mutex_init(&mMutex, NULL);
512
513 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
514 mDefaultMetadata[i] = NULL;
515
516 // Getting system props of different kinds
517 char prop[PROPERTY_VALUE_MAX];
518 memset(prop, 0, sizeof(prop));
519 property_get("persist.camera.raw.dump", prop, "0");
520 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800521 property_get("persist.camera.hal3.force.hdr", prop, "0");
522 mForceHdrSnapshot = atoi(prop);
523
Thierry Strudel3d639192016-09-09 11:52:26 -0700524 if (mEnableRawDump)
525 LOGD("Raw dump from Camera HAL enabled");
526
527 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
528 memset(mLdafCalib, 0, sizeof(mLdafCalib));
529
530 memset(prop, 0, sizeof(prop));
531 property_get("persist.camera.tnr.preview", prop, "0");
532 m_bTnrPreview = (uint8_t)atoi(prop);
533
534 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800535 property_get("persist.camera.swtnr.preview", prop, "1");
536 m_bSwTnrPreview = (uint8_t)atoi(prop);
537
538 memset(prop, 0, sizeof(prop));
Thierry Strudel3d639192016-09-09 11:52:26 -0700539 property_get("persist.camera.tnr.video", prop, "0");
540 m_bTnrVideo = (uint8_t)atoi(prop);
541
542 memset(prop, 0, sizeof(prop));
543 property_get("persist.camera.avtimer.debug", prop, "0");
544 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800545 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700546
Thierry Strudel54dc9782017-02-15 12:12:10 -0800547 memset(prop, 0, sizeof(prop));
548 property_get("persist.camera.cacmode.disable", prop, "0");
549 m_cacModeDisabled = (uint8_t)atoi(prop);
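// The persist.camera.* properties above are debug/tuning toggles read once per HAL
// instance at construction, so changes only take effect on the next camera open.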
550
Thierry Strudel3d639192016-09-09 11:52:26 -0700551 //Load and read GPU library.
552 lib_surface_utils = NULL;
553 LINK_get_surface_pixel_alignment = NULL;
554 mSurfaceStridePadding = CAM_PAD_TO_32;
555 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
556 if (lib_surface_utils) {
557 *(void **)&LINK_get_surface_pixel_alignment =
558 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
559 if (LINK_get_surface_pixel_alignment) {
560 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
561 }
562 dlclose(lib_surface_utils);
563 }
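// If libadreno_utils.so or the get_gpu_pixel_alignment symbol is unavailable, the
// stride padding simply remains at the CAM_PAD_TO_32 default set above.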
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700564
Emilian Peev0f3c3162017-03-15 12:57:46 +0000565 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
566 mPDSupported = (0 <= mPDIndex) ? true : false;
567
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700568 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700569}
570
571/*===========================================================================
572 * FUNCTION : ~QCamera3HardwareInterface
573 *
574 * DESCRIPTION: destructor of QCamera3HardwareInterface
575 *
576 * PARAMETERS : none
577 *
578 * RETURN : none
579 *==========================================================================*/
580QCamera3HardwareInterface::~QCamera3HardwareInterface()
581{
582 LOGD("E");
583
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800584 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700585
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800586 // Disable power hint and enable the perf lock for close camera
587 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
588 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
589
590 // unlink of dualcam during close camera
591 if (mIsDeviceLinked) {
592 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
593 &m_pDualCamCmdPtr->bundle_info;
594 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
595 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
596 pthread_mutex_lock(&gCamLock);
597
598 if (mIsMainCamera == 1) {
599 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
600 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
601 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
602 // related session id should be session id of linked session
603 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
604 } else {
605 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
606 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
607 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
608 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
609 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800610 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800611 pthread_mutex_unlock(&gCamLock);
612
613 rc = mCameraHandle->ops->set_dual_cam_cmd(
614 mCameraHandle->camera_handle);
615 if (rc < 0) {
616 LOGE("Dualcam: Unlink failed, but still proceed to close");
617 }
618 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700619
620 /* We need to stop all streams before deleting any stream */
621 if (mRawDumpChannel) {
622 mRawDumpChannel->stop();
623 }
624
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700625 if (mHdrPlusRawSrcChannel) {
626 mHdrPlusRawSrcChannel->stop();
627 }
628
Thierry Strudel3d639192016-09-09 11:52:26 -0700629 // NOTE: 'camera3_stream_t *' objects are already freed at
630 // this stage by the framework
631 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
632 it != mStreamInfo.end(); it++) {
633 QCamera3ProcessingChannel *channel = (*it)->channel;
634 if (channel) {
635 channel->stop();
636 }
637 }
638 if (mSupportChannel)
639 mSupportChannel->stop();
640
641 if (mAnalysisChannel) {
642 mAnalysisChannel->stop();
643 }
644 if (mMetadataChannel) {
645 mMetadataChannel->stop();
646 }
647 if (mChannelHandle) {
648 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
649 mChannelHandle);
650 LOGD("stopping channel %d", mChannelHandle);
651 }
652
653 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
654 it != mStreamInfo.end(); it++) {
655 QCamera3ProcessingChannel *channel = (*it)->channel;
656 if (channel)
657 delete channel;
658 free (*it);
659 }
660 if (mSupportChannel) {
661 delete mSupportChannel;
662 mSupportChannel = NULL;
663 }
664
665 if (mAnalysisChannel) {
666 delete mAnalysisChannel;
667 mAnalysisChannel = NULL;
668 }
669 if (mRawDumpChannel) {
670 delete mRawDumpChannel;
671 mRawDumpChannel = NULL;
672 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700673 if (mHdrPlusRawSrcChannel) {
674 delete mHdrPlusRawSrcChannel;
675 mHdrPlusRawSrcChannel = NULL;
676 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700677 if (mDummyBatchChannel) {
678 delete mDummyBatchChannel;
679 mDummyBatchChannel = NULL;
680 }
681
682 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800683 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700684
685 if (mMetadataChannel) {
686 delete mMetadataChannel;
687 mMetadataChannel = NULL;
688 }
689
690 /* Clean up all channels */
691 if (mCameraInitialized) {
692 if(!mFirstConfiguration){
693 //send the last unconfigure
694 cam_stream_size_info_t stream_config_info;
695 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
696 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
697 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800698 m_bIs4KVideo ? 0 :
699 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700700 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700701 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
702 stream_config_info);
703 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
704 if (rc < 0) {
705 LOGE("set_parms failed for unconfigure");
706 }
707 }
708 deinitParameters();
709 }
710
711 if (mChannelHandle) {
712 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
713 mChannelHandle);
714 LOGH("deleting channel %d", mChannelHandle);
715 mChannelHandle = 0;
716 }
717
718 if (mState != CLOSED)
719 closeCamera();
720
721 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
722 req.mPendingBufferList.clear();
723 }
724 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700725 for (pendingRequestIterator i = mPendingRequestsList.begin();
726 i != mPendingRequestsList.end();) {
727 i = erasePendingRequest(i);
728 }
729 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
730 if (mDefaultMetadata[i])
731 free_camera_metadata(mDefaultMetadata[i]);
732
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800733 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700734
735 pthread_cond_destroy(&mRequestCond);
736
737 pthread_cond_destroy(&mBuffersCond);
738
739 pthread_mutex_destroy(&mMutex);
740 LOGD("X");
741}
742
743/*===========================================================================
744 * FUNCTION : erasePendingRequest
745 *
746 * DESCRIPTION: function to erase a desired pending request after freeing any
747 * allocated memory
748 *
749 * PARAMETERS :
750 * @i : iterator pointing to pending request to be erased
751 *
752 * RETURN : iterator pointing to the next request
753 *==========================================================================*/
754QCamera3HardwareInterface::pendingRequestIterator
755 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
756{
757 if (i->input_buffer != NULL) {
758 free(i->input_buffer);
759 i->input_buffer = NULL;
760 }
761 if (i->settings != NULL)
762 free_camera_metadata((camera_metadata_t*)i->settings);
763 return mPendingRequestsList.erase(i);
764}
765
766/*===========================================================================
767 * FUNCTION : camEvtHandle
768 *
769 * DESCRIPTION: Function registered to mm-camera-interface to handle events
770 *
771 * PARAMETERS :
772 * @camera_handle : interface layer camera handle
773 * @evt : ptr to event
774 * @user_data : user data ptr
775 *
776 * RETURN : none
777 *==========================================================================*/
778void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
779 mm_camera_event_t *evt,
780 void *user_data)
781{
782 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
783 if (obj && evt) {
784 switch(evt->server_event_type) {
785 case CAM_EVENT_TYPE_DAEMON_DIED:
786 pthread_mutex_lock(&obj->mMutex);
787 obj->mState = ERROR;
788 pthread_mutex_unlock(&obj->mMutex);
789 LOGE("Fatal, camera daemon died");
790 break;
791
792 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
793 LOGD("HAL got request pull from Daemon");
794 pthread_mutex_lock(&obj->mMutex);
795 obj->mWokenUpByDaemon = true;
796 obj->unblockRequestIfNecessary();
797 pthread_mutex_unlock(&obj->mMutex);
798 break;
799
800 default:
801 LOGW("Warning: Unhandled event %d",
802 evt->server_event_type);
803 break;
804 }
805 } else {
806 LOGE("NULL user_data/evt");
807 }
808}
809
810/*===========================================================================
811 * FUNCTION : openCamera
812 *
813 * DESCRIPTION: open camera
814 *
815 * PARAMETERS :
816 * @hw_device : double ptr for camera device struct
817 *
818 * RETURN : int32_t type of status
819 * NO_ERROR -- success
 820 * non-zero failure code
821 *==========================================================================*/
822int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
823{
824 int rc = 0;
825 if (mState != CLOSED) {
826 *hw_device = NULL;
827 return PERMISSION_DENIED;
828 }
829
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800830 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700831 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
832 mCameraId);
833
834 rc = openCamera();
835 if (rc == 0) {
836 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800837 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700838 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800839 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700840
Thierry Strudel3d639192016-09-09 11:52:26 -0700841 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
842 mCameraId, rc);
843
844 if (rc == NO_ERROR) {
845 mState = OPENED;
846 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800847
Thierry Strudel3d639192016-09-09 11:52:26 -0700848 return rc;
849}
850
851/*===========================================================================
852 * FUNCTION : openCamera
853 *
854 * DESCRIPTION: open camera
855 *
856 * PARAMETERS : none
857 *
858 * RETURN : int32_t type of status
859 * NO_ERROR -- success
 860 * non-zero failure code
861 *==========================================================================*/
862int QCamera3HardwareInterface::openCamera()
863{
864 int rc = 0;
865 char value[PROPERTY_VALUE_MAX];
866
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800867 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700868 if (mCameraHandle) {
869 LOGE("Failure: Camera already opened");
870 return ALREADY_EXISTS;
871 }
872
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700873 {
874 Mutex::Autolock l(gHdrPlusClientLock);
875 if (gEaselManagerClient.isEaselPresentOnDevice()) {
876 rc = gEaselManagerClient.resume();
877 if (rc != 0) {
878 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
879 return rc;
880 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800881 }
882 }
883
Thierry Strudel3d639192016-09-09 11:52:26 -0700884 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
885 if (rc < 0) {
886 LOGE("Failed to reserve flash for camera id: %d",
887 mCameraId);
888 return UNKNOWN_ERROR;
889 }
890
891 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
892 if (rc) {
893 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
894 return rc;
895 }
896
897 if (!mCameraHandle) {
898 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
899 return -ENODEV;
900 }
901
902 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
903 camEvtHandle, (void *)this);
904
905 if (rc < 0) {
906 LOGE("Error, failed to register event callback");
907 /* Not closing camera here since it is already handled in destructor */
908 return FAILED_TRANSACTION;
909 }
910
911 mExifParams.debug_params =
912 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
913 if (mExifParams.debug_params) {
914 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
915 } else {
916 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
917 return NO_MEMORY;
918 }
919 mFirstConfiguration = true;
920
921 //Notify display HAL that a camera session is active.
922 //But avoid calling the same during bootup because camera service might open/close
923 //cameras at boot time during its initialization and display service will also internally
924 //wait for camera service to initialize first while calling this display API, resulting in a
925 //deadlock situation. Since boot time camera open/close calls are made only to fetch
926 //capabilities, no need of this display bw optimization.
927 //Use "service.bootanim.exit" property to know boot status.
928 property_get("service.bootanim.exit", value, "0");
929 if (atoi(value) == 1) {
930 pthread_mutex_lock(&gCamLock);
931 if (gNumCameraSessions++ == 0) {
932 setCameraLaunchStatus(true);
933 }
934 pthread_mutex_unlock(&gCamLock);
935 }
936
937 //fill the session id needed while linking dual cam
938 pthread_mutex_lock(&gCamLock);
939 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
940 &sessionId[mCameraId]);
941 pthread_mutex_unlock(&gCamLock);
942
943 if (rc < 0) {
 944 LOGE("Error, failed to get session id");
945 return UNKNOWN_ERROR;
946 } else {
947 //Allocate related cam sync buffer
948 //this is needed for the payload that goes along with bundling cmd for related
949 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700950 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
951 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700952 if(rc != OK) {
953 rc = NO_MEMORY;
954 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
955 return NO_MEMORY;
956 }
957
958 //Map memory for related cam sync buffer
959 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700960 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
961 m_pDualCamCmdHeap->getFd(0),
962 sizeof(cam_dual_camera_cmd_info_t),
963 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700964 if(rc < 0) {
965 LOGE("Dualcam: failed to map Related cam sync buffer");
966 rc = FAILED_TRANSACTION;
967 return NO_MEMORY;
968 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700969 m_pDualCamCmdPtr =
970 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700971 }
972
973 LOGH("mCameraId=%d",mCameraId);
974
975 return NO_ERROR;
976}
977
978/*===========================================================================
979 * FUNCTION : closeCamera
980 *
981 * DESCRIPTION: close camera
982 *
983 * PARAMETERS : none
984 *
985 * RETURN : int32_t type of status
986 * NO_ERROR -- success
 987 * non-zero failure code
988 *==========================================================================*/
989int QCamera3HardwareInterface::closeCamera()
990{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800991 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700992 int rc = NO_ERROR;
993 char value[PROPERTY_VALUE_MAX];
994
995 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
996 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700997
998 // unmap memory for related cam sync buffer
999 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001000 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001001 if (NULL != m_pDualCamCmdHeap) {
1002 m_pDualCamCmdHeap->deallocate();
1003 delete m_pDualCamCmdHeap;
1004 m_pDualCamCmdHeap = NULL;
1005 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001006 }
1007
Thierry Strudel3d639192016-09-09 11:52:26 -07001008 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1009 mCameraHandle = NULL;
1010
1011 //reset session id to some invalid id
1012 pthread_mutex_lock(&gCamLock);
1013 sessionId[mCameraId] = 0xDEADBEEF;
1014 pthread_mutex_unlock(&gCamLock);
1015
1016 //Notify display HAL that there is no active camera session
1017 //but avoid calling the same during bootup. Refer to openCamera
1018 //for more details.
1019 property_get("service.bootanim.exit", value, "0");
1020 if (atoi(value) == 1) {
1021 pthread_mutex_lock(&gCamLock);
1022 if (--gNumCameraSessions == 0) {
1023 setCameraLaunchStatus(false);
1024 }
1025 pthread_mutex_unlock(&gCamLock);
1026 }
1027
Thierry Strudel3d639192016-09-09 11:52:26 -07001028 if (mExifParams.debug_params) {
1029 free(mExifParams.debug_params);
1030 mExifParams.debug_params = NULL;
1031 }
1032 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1033 LOGW("Failed to release flash for camera id: %d",
1034 mCameraId);
1035 }
1036 mState = CLOSED;
1037 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1038 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001039
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001040 {
1041 Mutex::Autolock l(gHdrPlusClientLock);
1042 if (gHdrPlusClient != nullptr) {
1043 // Disable HDR+ mode.
1044 disableHdrPlusModeLocked();
1045 // Disconnect Easel if it's connected.
1046 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
1047 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001048 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001049
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001050 if (EaselManagerClientOpened) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001051 rc = gEaselManagerClient.stopMipi(mCameraId);
1052 if (rc != 0) {
1053 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1054 }
1055
1056 rc = gEaselManagerClient.suspend();
1057 if (rc != 0) {
1058 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1059 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001060 }
1061 }
1062
Thierry Strudel3d639192016-09-09 11:52:26 -07001063 return rc;
1064}
1065
1066/*===========================================================================
1067 * FUNCTION : initialize
1068 *
 1069 * DESCRIPTION: Initialize framework callback functions
1070 *
1071 * PARAMETERS :
1072 * @callback_ops : callback function to frameworks
1073 *
1074 * RETURN :
1075 *
1076 *==========================================================================*/
1077int QCamera3HardwareInterface::initialize(
1078 const struct camera3_callback_ops *callback_ops)
1079{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001080 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001081 int rc;
1082
1083 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1084 pthread_mutex_lock(&mMutex);
1085
1086 // Validate current state
1087 switch (mState) {
1088 case OPENED:
1089 /* valid state */
1090 break;
1091 default:
1092 LOGE("Invalid state %d", mState);
1093 rc = -ENODEV;
1094 goto err1;
1095 }
1096
1097 rc = initParameters();
1098 if (rc < 0) {
 1099 LOGE("initParameters failed %d", rc);
1100 goto err1;
1101 }
1102 mCallbackOps = callback_ops;
1103
1104 mChannelHandle = mCameraHandle->ops->add_channel(
1105 mCameraHandle->camera_handle, NULL, NULL, this);
1106 if (mChannelHandle == 0) {
1107 LOGE("add_channel failed");
1108 rc = -ENOMEM;
1109 pthread_mutex_unlock(&mMutex);
1110 return rc;
1111 }
1112
1113 pthread_mutex_unlock(&mMutex);
1114 mCameraInitialized = true;
1115 mState = INITIALIZED;
1116 LOGI("X");
1117 return 0;
1118
1119err1:
1120 pthread_mutex_unlock(&mMutex);
1121 return rc;
1122}
1123
1124/*===========================================================================
1125 * FUNCTION : validateStreamDimensions
1126 *
 1127 * DESCRIPTION: Check if the requested stream configurations are among those advertised
1128 *
1129 * PARAMETERS :
1130 * @stream_list : streams to be configured
1131 *
1132 * RETURN :
1133 *
1134 *==========================================================================*/
1135int QCamera3HardwareInterface::validateStreamDimensions(
1136 camera3_stream_configuration_t *streamList)
1137{
1138 int rc = NO_ERROR;
1139 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001140 uint32_t depthWidth = 0;
1141 uint32_t depthHeight = 0;
1142 if (mPDSupported) {
1143 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1144 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1145 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001146
1147 camera3_stream_t *inputStream = NULL;
1148 /*
 1149 * Loop through all streams to find the input stream, if it exists
1150 */
1151 for (size_t i = 0; i< streamList->num_streams; i++) {
1152 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1153 if (inputStream != NULL) {
1154 LOGE("Error, Multiple input streams requested");
1155 return -EINVAL;
1156 }
1157 inputStream = streamList->streams[i];
1158 }
1159 }
1160 /*
1161 * Loop through all streams requested in configuration
1162 * Check if unsupported sizes have been requested on any of them
1163 */
1164 for (size_t j = 0; j < streamList->num_streams; j++) {
1165 bool sizeFound = false;
1166 camera3_stream_t *newStream = streamList->streams[j];
1167
1168 uint32_t rotatedHeight = newStream->height;
1169 uint32_t rotatedWidth = newStream->width;
1170 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1171 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1172 rotatedHeight = newStream->width;
1173 rotatedWidth = newStream->height;
1174 }
1175
1176 /*
 1177 * Sizes differ for each type of stream format; check against the
 1178 * appropriate table.
1179 */
1180 switch (newStream->format) {
1181 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1182 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1183 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001184 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1185 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1186 mPDSupported) {
1187 if ((depthWidth == newStream->width) &&
1188 (depthHeight == newStream->height)) {
1189 sizeFound = true;
1190 }
1191 break;
1192 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001193 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1194 for (size_t i = 0; i < count; i++) {
1195 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1196 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1197 sizeFound = true;
1198 break;
1199 }
1200 }
1201 break;
1202 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001203 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1204 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001205 //As per spec, the depth cloud size should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001206 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001207 if ((depthSamplesCount == newStream->width) &&
1208 (1 == newStream->height)) {
1209 sizeFound = true;
1210 }
1211 break;
1212 }
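 // Worked example with hypothetical PD-stat dimensions: a 640x480 depth map would
 // be advertised as a BLOB stream of (640 * 480 * 2) / 16 = 38400 x 1 samples.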
Thierry Strudel3d639192016-09-09 11:52:26 -07001213 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1214 /* Verify set size against generated sizes table */
1215 for (size_t i = 0; i < count; i++) {
1216 if (((int32_t)rotatedWidth ==
1217 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1218 ((int32_t)rotatedHeight ==
1219 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1220 sizeFound = true;
1221 break;
1222 }
1223 }
1224 break;
1225 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1226 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1227 default:
1228 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1229 || newStream->stream_type == CAMERA3_STREAM_INPUT
1230 || IS_USAGE_ZSL(newStream->usage)) {
1231 if (((int32_t)rotatedWidth ==
1232 gCamCapability[mCameraId]->active_array_size.width) &&
1233 ((int32_t)rotatedHeight ==
1234 gCamCapability[mCameraId]->active_array_size.height)) {
1235 sizeFound = true;
1236 break;
1237 }
 1238 /* We could potentially break here to enforce that a ZSL stream
 1239 * set from the framework is always the full active array size,
 1240 * but it is not clear from the spec whether the framework will
 1241 * always follow that. We also have logic to override to the full
 1242 * array size, so the check is kept lenient at the moment.
 1243 */
1244 }
1245 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1246 MAX_SIZES_CNT);
1247 for (size_t i = 0; i < count; i++) {
1248 if (((int32_t)rotatedWidth ==
1249 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1250 ((int32_t)rotatedHeight ==
1251 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1252 sizeFound = true;
1253 break;
1254 }
1255 }
1256 break;
1257 } /* End of switch(newStream->format) */
1258
1259 /* We error out even if a single stream has unsupported size set */
1260 if (!sizeFound) {
1261 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1262 rotatedWidth, rotatedHeight, newStream->format,
1263 gCamCapability[mCameraId]->active_array_size.width,
1264 gCamCapability[mCameraId]->active_array_size.height);
1265 rc = -EINVAL;
1266 break;
1267 }
1268 } /* End of for each stream */
1269 return rc;
1270}
1271
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001272/*===========================================================================
1273 * FUNCTION : validateUsageFlags
1274 *
1275 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1276 *
1277 * PARAMETERS :
1278 * @stream_list : streams to be configured
1279 *
1280 * RETURN :
1281 * NO_ERROR if the usage flags are supported
1282 * error code if usage flags are not supported
1283 *
1284 *==========================================================================*/
1285int QCamera3HardwareInterface::validateUsageFlags(
1286 const camera3_stream_configuration_t* streamList)
1287{
1288 for (size_t j = 0; j < streamList->num_streams; j++) {
1289 const camera3_stream_t *newStream = streamList->streams[j];
1290
1291 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1292 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1293 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1294 continue;
1295 }
1296
1297 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1298 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1299 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1300 bool forcePreviewUBWC = true;
1301 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1302 forcePreviewUBWC = false;
1303 }
1304 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1305 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1306 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1307 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1308 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1309 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1310
1311 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1312 // So color spaces will always match.
1313
1314 // Check whether underlying formats of shared streams match.
1315 if (isVideo && isPreview && videoFormat != previewFormat) {
1316 LOGE("Combined video and preview usage flag is not supported");
1317 return -EINVAL;
1318 }
1319 if (isPreview && isZSL && previewFormat != zslFormat) {
1320 LOGE("Combined preview and zsl usage flag is not supported");
1321 return -EINVAL;
1322 }
1323 if (isVideo && isZSL && videoFormat != zslFormat) {
1324 LOGE("Combined video and zsl usage flag is not supported");
1325 return -EINVAL;
1326 }
1327 }
1328 return NO_ERROR;
1329}
1330
1331/*===========================================================================
1332 * FUNCTION : validateUsageFlagsForEis
1333 *
1334 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1335 *
1336 * PARAMETERS :
1337 * @stream_list : streams to be configured
1338 *
1339 * RETURN :
1340 * NO_ERROR if the usage flags are supported
1341 * error code if usage flags are not supported
1342 *
1343 *==========================================================================*/
1344int QCamera3HardwareInterface::validateUsageFlagsForEis(
1345 const camera3_stream_configuration_t* streamList)
1346{
1347 for (size_t j = 0; j < streamList->num_streams; j++) {
1348 const camera3_stream_t *newStream = streamList->streams[j];
1349
1350 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1351 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1352
 1353 // Because EIS is "hard-coded" for certain use cases, and the current
1354 // implementation doesn't support shared preview and video on the same
1355 // stream, return failure if EIS is forced on.
1356 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1357 LOGE("Combined video and preview usage flag is not supported due to EIS");
1358 return -EINVAL;
1359 }
1360 }
1361 return NO_ERROR;
1362}
1363
Thierry Strudel3d639192016-09-09 11:52:26 -07001364/*==============================================================================
1365 * FUNCTION : isSupportChannelNeeded
1366 *
 1367 * DESCRIPTION: Simple heuristic to determine whether a support channel is needed
1368 *
1369 * PARAMETERS :
1370 * @stream_list : streams to be configured
1371 * @stream_config_info : the config info for streams to be configured
1372 *
1373 * RETURN     : Boolean true/false decision
1374 *
1375 *==========================================================================*/
1376bool QCamera3HardwareInterface::isSupportChannelNeeded(
1377 camera3_stream_configuration_t *streamList,
1378 cam_stream_size_info_t stream_config_info)
1379{
1380 uint32_t i;
1381 bool pprocRequested = false;
1382 /* Check for conditions where PProc pipeline does not have any streams*/
1383 for (i = 0; i < stream_config_info.num_streams; i++) {
1384 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1385 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1386 pprocRequested = true;
1387 break;
1388 }
1389 }
1390
1391 if (pprocRequested == false )
1392 return true;
1393
1394 /* Dummy stream needed if only raw or jpeg streams present */
1395 for (i = 0; i < streamList->num_streams; i++) {
1396 switch(streamList->streams[i]->format) {
1397 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1398 case HAL_PIXEL_FORMAT_RAW10:
1399 case HAL_PIXEL_FORMAT_RAW16:
1400 case HAL_PIXEL_FORMAT_BLOB:
1401 break;
1402 default:
1403 return false;
1404 }
1405 }
1406 return true;
1407}
1408
1409/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001410 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001411 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001412 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001413 *
1414 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001415 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001416 *
1417 * RETURN : int32_t type of status
1418 * NO_ERROR -- success
1419 *              non-zero failure code
1420 *
1421 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001422int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001423{
1424 int32_t rc = NO_ERROR;
1425
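    // Find the largest width and height across all configured streams; this
    // drives the CAM_INTF_PARM_MAX_DIMENSION setting used to pick the sensor mode.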
1426 cam_dimension_t max_dim = {0, 0};
1427 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1428 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1429 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1430 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1431 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1432 }
1433
1434 clear_metadata_buffer(mParameters);
1435
1436 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1437 max_dim);
1438 if (rc != NO_ERROR) {
1439 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1440 return rc;
1441 }
1442
1443 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1444 if (rc != NO_ERROR) {
1445 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1446 return rc;
1447 }
1448
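    // With the maximum dimension published, query back the sensor mode that
    // the backend has selected for this stream configuration.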
1449 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001450 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001451
1452 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1453 mParameters);
1454 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001455 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001456 return rc;
1457 }
1458
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001459 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001460 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1461 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1462 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1463 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1464 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001465
1466 return rc;
1467}
1468
1469/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001470 * FUNCTION : addToPPFeatureMask
1471 *
1472 * DESCRIPTION: add additional features to pp feature mask based on
1473 * stream type and usecase
1474 *
1475 * PARAMETERS :
1476 * @stream_format : stream type for feature mask
1477 * @stream_idx : stream idx within postprocess_mask list to change
1478 *
1479 * RETURN     : None
1480 *
1481 *==========================================================================*/
1482void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1483 uint32_t stream_idx)
1484{
1485 char feature_mask_value[PROPERTY_VALUE_MAX];
1486 cam_feature_mask_t feature_mask;
1487 int args_converted;
1488 int property_len;
1489
1490 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001491#ifdef _LE_CAMERA_
1492 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1493 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1494 property_len = property_get("persist.camera.hal3.feature",
1495 feature_mask_value, swtnr_feature_mask_value);
1496#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 property_len = property_get("persist.camera.hal3.feature",
1498 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001499#endif
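    /* The property value may be given either as hex with a "0x" prefix or as a
     * plain decimal number, e.g. (illustrative values only, not defaults):
     *     adb shell setprop persist.camera.hal3.feature 0x4000
     *     adb shell setprop persist.camera.hal3.feature 16384
     */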
Thierry Strudel3d639192016-09-09 11:52:26 -07001500 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1501 (feature_mask_value[1] == 'x')) {
1502 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1503 } else {
1504 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1505 }
1506 if (1 != args_converted) {
1507 feature_mask = 0;
1508 LOGE("Wrong feature mask %s", feature_mask_value);
1509 return;
1510 }
1511
1512 switch (stream_format) {
1513 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1514        /* Add SW TNR or LLVD to the pp feature mask only if the video hint is enabled */
1515 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1516 mStreamConfigInfo.postprocess_mask[stream_idx]
1517 |= CAM_QTI_FEATURE_SW_TNR;
1518 LOGH("Added SW TNR to pp feature mask");
1519 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1520 mStreamConfigInfo.postprocess_mask[stream_idx]
1521 |= CAM_QCOM_FEATURE_LLVD;
1522 LOGH("Added LLVD SeeMore to pp feature mask");
1523 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001524 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1525 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1526 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1527 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001528 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1529 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1530 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1531 CAM_QTI_FEATURE_BINNING_CORRECTION;
1532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001533 break;
1534 }
1535 default:
1536 break;
1537 }
1538 LOGD("PP feature mask %llx",
1539 mStreamConfigInfo.postprocess_mask[stream_idx]);
1540}
1541
1542/*==============================================================================
1543 * FUNCTION : updateFpsInPreviewBuffer
1544 *
1545 * DESCRIPTION: update FPS information in preview buffer.
1546 *
1547 * PARAMETERS :
1548 * @metadata : pointer to metadata buffer
1549 * @frame_number: frame_number to look for in pending buffer list
1550 *
1551 * RETURN : None
1552 *
1553 *==========================================================================*/
1554void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1555 uint32_t frame_number)
1556{
1557 // Mark all pending buffers for this particular request
1558 // with corresponding framerate information
1559 for (List<PendingBuffersInRequest>::iterator req =
1560 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1561 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1562 for(List<PendingBufferInfo>::iterator j =
1563 req->mPendingBufferList.begin();
1564 j != req->mPendingBufferList.end(); j++) {
1565 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1566 if ((req->frame_number == frame_number) &&
1567 (channel->getStreamTypeMask() &
1568 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1569 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1570 CAM_INTF_PARM_FPS_RANGE, metadata) {
1571 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1572 struct private_handle_t *priv_handle =
1573 (struct private_handle_t *)(*(j->buffer));
1574 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1575 }
1576 }
1577 }
1578 }
1579}
1580
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001581/*==============================================================================
1582 * FUNCTION : updateTimeStampInPendingBuffers
1583 *
1584 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1585 * of a frame number
1586 *
1587 * PARAMETERS :
1588 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1589 * @timestamp : timestamp to be set
1590 *
1591 * RETURN : None
1592 *
1593 *==========================================================================*/
1594void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1595 uint32_t frameNumber, nsecs_t timestamp)
1596{
1597 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1598 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1599 if (req->frame_number != frameNumber)
1600 continue;
1601
1602 for (auto k = req->mPendingBufferList.begin();
1603 k != req->mPendingBufferList.end(); k++ ) {
1604 struct private_handle_t *priv_handle =
1605 (struct private_handle_t *) (*(k->buffer));
1606 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1607 }
1608 }
1609 return;
1610}
1611
Thierry Strudel3d639192016-09-09 11:52:26 -07001612/*===========================================================================
1613 * FUNCTION : configureStreams
1614 *
1615 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1616 * and output streams.
1617 *
1618 * PARAMETERS :
1619 * @stream_list : streams to be configured
1620 *
1621 * RETURN     : int type of status (forwarded from configureStreamsPerfLocked)
1622 *
1623 *==========================================================================*/
1624int QCamera3HardwareInterface::configureStreams(
1625 camera3_stream_configuration_t *streamList)
1626{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001627 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001628 int rc = 0;
1629
1630 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001631 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001632 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001633 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001634
1635 return rc;
1636}
1637
1638/*===========================================================================
1639 * FUNCTION : configureStreamsPerfLocked
1640 *
1641 * DESCRIPTION: configureStreams while perfLock is held.
1642 *
1643 * PARAMETERS :
1644 * @stream_list : streams to be configured
1645 *
1646 * RETURN : int32_t type of status
1647 * NO_ERROR -- success
1648 *              non-zero failure code
1649 *==========================================================================*/
1650int QCamera3HardwareInterface::configureStreamsPerfLocked(
1651 camera3_stream_configuration_t *streamList)
1652{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001653 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001654 int rc = 0;
1655
1656 // Sanity check stream_list
1657 if (streamList == NULL) {
1658 LOGE("NULL stream configuration");
1659 return BAD_VALUE;
1660 }
1661 if (streamList->streams == NULL) {
1662 LOGE("NULL stream list");
1663 return BAD_VALUE;
1664 }
1665
1666 if (streamList->num_streams < 1) {
1667 LOGE("Bad number of streams requested: %d",
1668 streamList->num_streams);
1669 return BAD_VALUE;
1670 }
1671
1672 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1673 LOGE("Maximum number of streams %d exceeded: %d",
1674 MAX_NUM_STREAMS, streamList->num_streams);
1675 return BAD_VALUE;
1676 }
1677
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001678 rc = validateUsageFlags(streamList);
1679 if (rc != NO_ERROR) {
1680 return rc;
1681 }
1682
Thierry Strudel3d639192016-09-09 11:52:26 -07001683 mOpMode = streamList->operation_mode;
1684 LOGD("mOpMode: %d", mOpMode);
1685
1686    /* First invalidate all the streams in mStreamInfo;
1687 * if they appear again, they will be validated */
1688 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1689 it != mStreamInfo.end(); it++) {
1690 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1691 if (channel) {
1692 channel->stop();
1693 }
1694 (*it)->status = INVALID;
1695 }
1696
1697 if (mRawDumpChannel) {
1698 mRawDumpChannel->stop();
1699 delete mRawDumpChannel;
1700 mRawDumpChannel = NULL;
1701 }
1702
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001703 if (mHdrPlusRawSrcChannel) {
1704 mHdrPlusRawSrcChannel->stop();
1705 delete mHdrPlusRawSrcChannel;
1706 mHdrPlusRawSrcChannel = NULL;
1707 }
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709 if (mSupportChannel)
1710 mSupportChannel->stop();
1711
1712 if (mAnalysisChannel) {
1713 mAnalysisChannel->stop();
1714 }
1715 if (mMetadataChannel) {
1716        /* If mStreamInfo is not empty, the metadata stream exists */
1717 mMetadataChannel->stop();
1718 }
1719 if (mChannelHandle) {
1720 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1721 mChannelHandle);
1722 LOGD("stopping channel %d", mChannelHandle);
1723 }
1724
1725 pthread_mutex_lock(&mMutex);
1726
1727 // Check state
1728 switch (mState) {
1729 case INITIALIZED:
1730 case CONFIGURED:
1731 case STARTED:
1732 /* valid state */
1733 break;
1734 default:
1735 LOGE("Invalid state %d", mState);
1736 pthread_mutex_unlock(&mMutex);
1737 return -ENODEV;
1738 }
1739
1740 /* Check whether we have video stream */
1741 m_bIs4KVideo = false;
1742 m_bIsVideo = false;
1743 m_bEisSupportedSize = false;
1744 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001745 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001746 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001747 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001748 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001749 uint32_t videoWidth = 0U;
1750 uint32_t videoHeight = 0U;
1751 size_t rawStreamCnt = 0;
1752 size_t stallStreamCnt = 0;
1753 size_t processedStreamCnt = 0;
1754 // Number of streams on ISP encoder path
1755 size_t numStreamsOnEncoder = 0;
1756 size_t numYuv888OnEncoder = 0;
1757 bool bYuv888OverrideJpeg = false;
1758 cam_dimension_t largeYuv888Size = {0, 0};
1759 cam_dimension_t maxViewfinderSize = {0, 0};
1760 bool bJpegExceeds4K = false;
1761 bool bJpegOnEncoder = false;
1762 bool bUseCommonFeatureMask = false;
1763 cam_feature_mask_t commonFeatureMask = 0;
1764 bool bSmallJpegSize = false;
1765 uint32_t width_ratio;
1766 uint32_t height_ratio;
1767 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1768 camera3_stream_t *inputStream = NULL;
1769 bool isJpeg = false;
1770 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001771 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001772 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001773
1774 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1775
1776 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001777 uint8_t eis_prop_set;
1778 uint32_t maxEisWidth = 0;
1779 uint32_t maxEisHeight = 0;
1780
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001781 // Initialize all instant AEC related variables
1782 mInstantAEC = false;
1783 mResetInstantAEC = false;
1784 mInstantAECSettledFrameNumber = 0;
1785 mAecSkipDisplayFrameBound = 0;
1786 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001787 mCurrFeatureState = 0;
1788 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001789
Thierry Strudel3d639192016-09-09 11:52:26 -07001790 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1791
1792 size_t count = IS_TYPE_MAX;
1793 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1794 for (size_t i = 0; i < count; i++) {
1795 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001796 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1797 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001798 break;
1799 }
1800 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001801
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001802 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001803 maxEisWidth = MAX_EIS_WIDTH;
1804 maxEisHeight = MAX_EIS_HEIGHT;
1805 }
1806
1807 /* EIS setprop control */
1808 char eis_prop[PROPERTY_VALUE_MAX];
1809 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001810 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001811 eis_prop_set = (uint8_t)atoi(eis_prop);
1812
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001813 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001814 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1815
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001816 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1817 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 /* stream configurations */
1820 for (size_t i = 0; i < streamList->num_streams; i++) {
1821 camera3_stream_t *newStream = streamList->streams[i];
1822 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1823 "height = %d, rotation = %d, usage = 0x%x",
1824 i, newStream->stream_type, newStream->format,
1825 newStream->width, newStream->height, newStream->rotation,
1826 newStream->usage);
1827 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1828 newStream->stream_type == CAMERA3_STREAM_INPUT){
1829 isZsl = true;
1830 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001831 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1832 IS_USAGE_PREVIEW(newStream->usage)) {
1833 isPreview = true;
1834 }
1835
Thierry Strudel3d639192016-09-09 11:52:26 -07001836 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1837 inputStream = newStream;
1838 }
1839
Emilian Peev7650c122017-01-19 08:24:33 -08001840 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1841 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 isJpeg = true;
1843 jpegSize.width = newStream->width;
1844 jpegSize.height = newStream->height;
1845 if (newStream->width > VIDEO_4K_WIDTH ||
1846 newStream->height > VIDEO_4K_HEIGHT)
1847 bJpegExceeds4K = true;
1848 }
1849
1850 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1851 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1852 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001853 // In HAL3 we can have multiple different video streams.
1854 // The variables video width and height are used below as
1855 // dimensions of the biggest of them
1856 if (videoWidth < newStream->width ||
1857 videoHeight < newStream->height) {
1858 videoWidth = newStream->width;
1859 videoHeight = newStream->height;
1860 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1862 (VIDEO_4K_HEIGHT <= newStream->height)) {
1863 m_bIs4KVideo = true;
1864 }
1865 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1866 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001867
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 }
1869 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1870 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1871 switch (newStream->format) {
1872 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001873 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1874 depthPresent = true;
1875 break;
1876 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001877 stallStreamCnt++;
1878 if (isOnEncoder(maxViewfinderSize, newStream->width,
1879 newStream->height)) {
1880 numStreamsOnEncoder++;
1881 bJpegOnEncoder = true;
1882 }
1883 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1884 newStream->width);
1885 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1886                    newStream->height);
1887 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1888 "FATAL: max_downscale_factor cannot be zero and so assert");
1889 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1890 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1891 LOGH("Setting small jpeg size flag to true");
1892 bSmallJpegSize = true;
1893 }
1894 break;
1895 case HAL_PIXEL_FORMAT_RAW10:
1896 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1897 case HAL_PIXEL_FORMAT_RAW16:
1898 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001899 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1900 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1901 pdStatCount++;
1902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001903 break;
1904 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1905 processedStreamCnt++;
1906 if (isOnEncoder(maxViewfinderSize, newStream->width,
1907 newStream->height)) {
1908 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1909 !IS_USAGE_ZSL(newStream->usage)) {
1910 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1911 }
1912 numStreamsOnEncoder++;
1913 }
1914 break;
1915 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1916 processedStreamCnt++;
1917 if (isOnEncoder(maxViewfinderSize, newStream->width,
1918 newStream->height)) {
1919 // If Yuv888 size is not greater than 4K, set feature mask
1920 // to SUPERSET so that it support concurrent request on
1921 // YUV and JPEG.
1922 if (newStream->width <= VIDEO_4K_WIDTH &&
1923 newStream->height <= VIDEO_4K_HEIGHT) {
1924 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1925 }
1926 numStreamsOnEncoder++;
1927 numYuv888OnEncoder++;
1928 largeYuv888Size.width = newStream->width;
1929 largeYuv888Size.height = newStream->height;
1930 }
1931 break;
1932 default:
1933 processedStreamCnt++;
1934 if (isOnEncoder(maxViewfinderSize, newStream->width,
1935 newStream->height)) {
1936 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1937 numStreamsOnEncoder++;
1938 }
1939 break;
1940 }
1941
1942 }
1943 }
1944
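    // EIS is applied only to rear-camera video use cases: disable it for front
    // cameras and for configurations that do not include a video stream.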
1945 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1946 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1947 !m_bIsVideo) {
1948 m_bEisEnable = false;
1949 }
1950
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001951 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1952 pthread_mutex_unlock(&mMutex);
1953 return -EINVAL;
1954 }
1955
Thierry Strudel54dc9782017-02-15 12:12:10 -08001956 uint8_t forceEnableTnr = 0;
1957 char tnr_prop[PROPERTY_VALUE_MAX];
1958 memset(tnr_prop, 0, sizeof(tnr_prop));
1959 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1960 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1961
Thierry Strudel3d639192016-09-09 11:52:26 -07001962 /* Logic to enable/disable TNR based on specific config size/etc.*/
1963 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1964 ((videoWidth == 1920 && videoHeight == 1080) ||
1965 (videoWidth == 1280 && videoHeight == 720)) &&
1966 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1967 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001968 else if (forceEnableTnr)
1969 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001970
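    // Video HDR is opted in through the persist.camera.hdr.video property and is
    // only honored for video sessions outside constrained high-speed mode.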
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001971 char videoHdrProp[PROPERTY_VALUE_MAX];
1972 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1973 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1974 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1975
1976 if (hdr_mode_prop == 1 && m_bIsVideo &&
1977 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1978 m_bVideoHdrEnabled = true;
1979 else
1980 m_bVideoHdrEnabled = false;
1981
1982
Thierry Strudel3d639192016-09-09 11:52:26 -07001983 /* Check if num_streams is sane */
1984 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1985 rawStreamCnt > MAX_RAW_STREAMS ||
1986 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1987        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
1988 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1989 pthread_mutex_unlock(&mMutex);
1990 return -EINVAL;
1991 }
1992 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001993 if (isZsl && m_bIs4KVideo) {
1994 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001995 pthread_mutex_unlock(&mMutex);
1996 return -EINVAL;
1997 }
1998 /* Check if stream sizes are sane */
1999 if (numStreamsOnEncoder > 2) {
2000 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2001 pthread_mutex_unlock(&mMutex);
2002 return -EINVAL;
2003 } else if (1 < numStreamsOnEncoder){
2004 bUseCommonFeatureMask = true;
2005 LOGH("Multiple streams above max viewfinder size, common mask needed");
2006 }
2007
2008 /* Check if BLOB size is greater than 4k in 4k recording case */
2009 if (m_bIs4KVideo && bJpegExceeds4K) {
2010 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2011 pthread_mutex_unlock(&mMutex);
2012 return -EINVAL;
2013 }
2014
Emilian Peev7650c122017-01-19 08:24:33 -08002015 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2016 depthPresent) {
2017 LOGE("HAL doesn't support depth streams in HFR mode!");
2018 pthread_mutex_unlock(&mMutex);
2019 return -EINVAL;
2020 }
2021
Thierry Strudel3d639192016-09-09 11:52:26 -07002022 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2023 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2024 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2025 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2026 // configurations:
2027 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2028 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2029 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2030 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2031 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2032 __func__);
2033 pthread_mutex_unlock(&mMutex);
2034 return -EINVAL;
2035 }
2036
2037 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2038 // the YUV stream's size is greater or equal to the JPEG size, set common
2039 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2040 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2041 jpegSize.width, jpegSize.height) &&
2042 largeYuv888Size.width > jpegSize.width &&
2043 largeYuv888Size.height > jpegSize.height) {
2044 bYuv888OverrideJpeg = true;
2045 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2046 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2047 }
2048
2049 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2050 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2051 commonFeatureMask);
2052 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2053 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2054
2055 rc = validateStreamDimensions(streamList);
2056 if (rc == NO_ERROR) {
2057 rc = validateStreamRotations(streamList);
2058 }
2059 if (rc != NO_ERROR) {
2060 LOGE("Invalid stream configuration requested!");
2061 pthread_mutex_unlock(&mMutex);
2062 return rc;
2063 }
2064
Emilian Peev0f3c3162017-03-15 12:57:46 +00002065 if (1 < pdStatCount) {
2066 LOGE("HAL doesn't support multiple PD streams");
2067 pthread_mutex_unlock(&mMutex);
2068 return -EINVAL;
2069 }
2070
2071 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2072 (1 == pdStatCount)) {
2073 LOGE("HAL doesn't support PD streams in HFR mode!");
2074 pthread_mutex_unlock(&mMutex);
2075 return -EINVAL;
2076 }
2077
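    // Second pass over the stream list: register any new streams in mStreamInfo
    // and identify the single stream (if any) whose size will serve as the
    // ZSL/reprocess input reference.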
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2079 for (size_t i = 0; i < streamList->num_streams; i++) {
2080 camera3_stream_t *newStream = streamList->streams[i];
2081 LOGH("newStream type = %d, stream format = %d "
2082 "stream size : %d x %d, stream rotation = %d",
2083 newStream->stream_type, newStream->format,
2084 newStream->width, newStream->height, newStream->rotation);
2085 //if the stream is in the mStreamList validate it
2086 bool stream_exists = false;
2087 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2088 it != mStreamInfo.end(); it++) {
2089 if ((*it)->stream == newStream) {
2090 QCamera3ProcessingChannel *channel =
2091 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2092 stream_exists = true;
2093 if (channel)
2094 delete channel;
2095 (*it)->status = VALID;
2096 (*it)->stream->priv = NULL;
2097 (*it)->channel = NULL;
2098 }
2099 }
2100 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2101 //new stream
2102 stream_info_t* stream_info;
2103 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2104 if (!stream_info) {
2105 LOGE("Could not allocate stream info");
2106 rc = -ENOMEM;
2107 pthread_mutex_unlock(&mMutex);
2108 return rc;
2109 }
2110 stream_info->stream = newStream;
2111 stream_info->status = VALID;
2112 stream_info->channel = NULL;
2113 mStreamInfo.push_back(stream_info);
2114 }
2115 /* Covers Opaque ZSL and API1 F/W ZSL */
2116 if (IS_USAGE_ZSL(newStream->usage)
2117 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2118 if (zslStream != NULL) {
2119 LOGE("Multiple input/reprocess streams requested!");
2120 pthread_mutex_unlock(&mMutex);
2121 return BAD_VALUE;
2122 }
2123 zslStream = newStream;
2124 }
2125 /* Covers YUV reprocess */
2126 if (inputStream != NULL) {
2127 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2128 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2129 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2130 && inputStream->width == newStream->width
2131 && inputStream->height == newStream->height) {
2132 if (zslStream != NULL) {
2133                    /* This scenario indicates that multiple YUV streams with the
2134                     * same size as the input stream have been requested. Since the
2135                     * zsl stream handle is used solely to override the size of
2136                     * streams which share h/w streams, we just make a guess here as
2137                     * to which of the streams is the ZSL stream; this will be
2138                     * refactored once we have generic logic for streams sharing encoder output.
2139 */
2140 LOGH("Warning, Multiple ip/reprocess streams requested!");
2141 }
2142 zslStream = newStream;
2143 }
2144 }
2145 }
2146
2147 /* If a zsl stream is set, we know that we have configured at least one input or
2148 bidirectional stream */
2149 if (NULL != zslStream) {
2150 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2151 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2152 mInputStreamInfo.format = zslStream->format;
2153 mInputStreamInfo.usage = zslStream->usage;
2154 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2155 mInputStreamInfo.dim.width,
2156 mInputStreamInfo.dim.height,
2157 mInputStreamInfo.format, mInputStreamInfo.usage);
2158 }
2159
2160 cleanAndSortStreamInfo();
2161 if (mMetadataChannel) {
2162 delete mMetadataChannel;
2163 mMetadataChannel = NULL;
2164 }
2165 if (mSupportChannel) {
2166 delete mSupportChannel;
2167 mSupportChannel = NULL;
2168 }
2169
2170 if (mAnalysisChannel) {
2171 delete mAnalysisChannel;
2172 mAnalysisChannel = NULL;
2173 }
2174
2175 if (mDummyBatchChannel) {
2176 delete mDummyBatchChannel;
2177 mDummyBatchChannel = NULL;
2178 }
2179
Emilian Peev7650c122017-01-19 08:24:33 -08002180 if (mDepthChannel) {
2181 mDepthChannel = NULL;
2182 }
2183
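    // Check whether EIS 3.0 is selected via the persist.camera.is_type property;
    // this influences the video buffer count and the PPEISCORE feature flag below.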
Thierry Strudel2896d122017-02-23 19:18:03 -08002184 char is_type_value[PROPERTY_VALUE_MAX];
2185 property_get("persist.camera.is_type", is_type_value, "4");
2186 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2187
Thierry Strudel3d639192016-09-09 11:52:26 -07002188 //Create metadata channel and initialize it
2189 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2190 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2191 gCamCapability[mCameraId]->color_arrangement);
2192 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2193 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002194 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002195 if (mMetadataChannel == NULL) {
2196 LOGE("failed to allocate metadata channel");
2197 rc = -ENOMEM;
2198 pthread_mutex_unlock(&mMutex);
2199 return rc;
2200 }
2201 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2202 if (rc < 0) {
2203 LOGE("metadata channel initialization failed");
2204 delete mMetadataChannel;
2205 mMetadataChannel = NULL;
2206 pthread_mutex_unlock(&mMutex);
2207 return rc;
2208 }
2209
Thierry Strudel2896d122017-02-23 19:18:03 -08002210 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002212 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002213 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2214 /* Allocate channel objects for the requested streams */
2215 for (size_t i = 0; i < streamList->num_streams; i++) {
2216 camera3_stream_t *newStream = streamList->streams[i];
2217 uint32_t stream_usage = newStream->usage;
2218 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2219 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2220 struct camera_info *p_info = NULL;
2221 pthread_mutex_lock(&gCamLock);
2222 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2223 pthread_mutex_unlock(&gCamLock);
2224 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2225 || IS_USAGE_ZSL(newStream->usage)) &&
2226 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002227 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002228 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002229 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2230 if (bUseCommonFeatureMask)
2231 zsl_ppmask = commonFeatureMask;
2232 else
2233 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002234 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002235 if (numStreamsOnEncoder > 0)
2236 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2237 else
2238 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002239 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002240 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002241 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002242 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002243 LOGH("Input stream configured, reprocess config");
2244 } else {
2245 //for non zsl streams find out the format
2246 switch (newStream->format) {
2247 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2248 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002249 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002250 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2251 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2252 /* add additional features to pp feature mask */
2253 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2254 mStreamConfigInfo.num_streams);
2255
2256 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2257 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2258 CAM_STREAM_TYPE_VIDEO;
2259 if (m_bTnrEnabled && m_bTnrVideo) {
2260 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2261 CAM_QCOM_FEATURE_CPP_TNR;
2262 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2263 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2264 ~CAM_QCOM_FEATURE_CDS;
2265 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002266 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2267 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2268 CAM_QTI_FEATURE_PPEISCORE;
2269 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002270 } else {
2271 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2272 CAM_STREAM_TYPE_PREVIEW;
2273 if (m_bTnrEnabled && m_bTnrPreview) {
2274 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2275 CAM_QCOM_FEATURE_CPP_TNR;
2276 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2277 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2278 ~CAM_QCOM_FEATURE_CDS;
2279 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002280 if(!m_bSwTnrPreview) {
2281 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2282 ~CAM_QTI_FEATURE_SW_TNR;
2283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002284 padding_info.width_padding = mSurfaceStridePadding;
2285 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002286 previewSize.width = (int32_t)newStream->width;
2287 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002288 }
2289 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2290 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2291 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2292 newStream->height;
2293 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2294 newStream->width;
2295 }
2296 }
2297 break;
2298 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002299 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002300 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2301 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2302 if (bUseCommonFeatureMask)
2303 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2304 commonFeatureMask;
2305 else
2306 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2307 CAM_QCOM_FEATURE_NONE;
2308 } else {
2309 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2310 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2311 }
2312 break;
2313 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002314 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2316 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2317 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2318 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2319 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002320 /* Remove rotation if it is not supported
2321 for 4K LiveVideo snapshot case (online processing) */
2322 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2323 CAM_QCOM_FEATURE_ROTATION)) {
2324 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2325 &= ~CAM_QCOM_FEATURE_ROTATION;
2326 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002327 } else {
2328 if (bUseCommonFeatureMask &&
2329 isOnEncoder(maxViewfinderSize, newStream->width,
2330 newStream->height)) {
2331 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2332 } else {
2333 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2334 }
2335 }
2336 if (isZsl) {
2337 if (zslStream) {
2338 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2339 (int32_t)zslStream->width;
2340 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2341 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002342 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2343 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 } else {
2345 LOGE("Error, No ZSL stream identified");
2346 pthread_mutex_unlock(&mMutex);
2347 return -EINVAL;
2348 }
2349 } else if (m_bIs4KVideo) {
2350 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2351 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2352 } else if (bYuv888OverrideJpeg) {
2353 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2354 (int32_t)largeYuv888Size.width;
2355 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2356 (int32_t)largeYuv888Size.height;
2357 }
2358 break;
2359 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2360 case HAL_PIXEL_FORMAT_RAW16:
2361 case HAL_PIXEL_FORMAT_RAW10:
2362 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2363 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2364 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002365 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2366 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2367 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2368 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2369 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2370 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2371 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2372 gCamCapability[mCameraId]->dt[mPDIndex];
2373 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2374 gCamCapability[mCameraId]->vc[mPDIndex];
2375 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002376 break;
2377 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002378 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002379 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2381 break;
2382 }
2383 }
2384
2385 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2386 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2387 gCamCapability[mCameraId]->color_arrangement);
2388
2389 if (newStream->priv == NULL) {
2390 //New stream, construct channel
2391 switch (newStream->stream_type) {
2392 case CAMERA3_STREAM_INPUT:
2393 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2394 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2395 break;
2396 case CAMERA3_STREAM_BIDIRECTIONAL:
2397 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2398 GRALLOC_USAGE_HW_CAMERA_WRITE;
2399 break;
2400 case CAMERA3_STREAM_OUTPUT:
2401 /* For video encoding stream, set read/write rarely
2402 * flag so that they may be set to un-cached */
2403 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2404 newStream->usage |=
2405 (GRALLOC_USAGE_SW_READ_RARELY |
2406 GRALLOC_USAGE_SW_WRITE_RARELY |
2407 GRALLOC_USAGE_HW_CAMERA_WRITE);
2408 else if (IS_USAGE_ZSL(newStream->usage))
2409 {
2410 LOGD("ZSL usage flag skipping");
2411 }
2412 else if (newStream == zslStream
2413 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2414 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2415 } else
2416 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2417 break;
2418 default:
2419 LOGE("Invalid stream_type %d", newStream->stream_type);
2420 break;
2421 }
2422
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002423 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002424 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2425 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2426 QCamera3ProcessingChannel *channel = NULL;
2427 switch (newStream->format) {
2428 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2429 if ((newStream->usage &
2430 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2431 (streamList->operation_mode ==
2432 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2433 ) {
2434 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2435 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002436 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 this,
2438 newStream,
2439 (cam_stream_type_t)
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2441 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2442 mMetadataChannel,
2443 0); //heap buffers are not required for HFR video channel
2444 if (channel == NULL) {
2445 LOGE("allocation of channel failed");
2446 pthread_mutex_unlock(&mMutex);
2447 return -ENOMEM;
2448 }
2449 //channel->getNumBuffers() will return 0 here so use
2450                        //MAX_INFLIGHT_HFR_REQUESTS
2451 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2452 newStream->priv = channel;
2453 LOGI("num video buffers in HFR mode: %d",
2454 MAX_INFLIGHT_HFR_REQUESTS);
2455 } else {
2456 /* Copy stream contents in HFR preview only case to create
2457 * dummy batch channel so that sensor streaming is in
2458 * HFR mode */
2459 if (!m_bIsVideo && (streamList->operation_mode ==
2460 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2461 mDummyBatchStream = *newStream;
2462 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002463 int bufferCount = MAX_INFLIGHT_REQUESTS;
2464 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2465 CAM_STREAM_TYPE_VIDEO) {
2466 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2467 bufferCount = MAX_VIDEO_BUFFERS;
2468 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2470 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002471 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002472 this,
2473 newStream,
2474 (cam_stream_type_t)
2475 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2477 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002478 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 if (channel == NULL) {
2480 LOGE("allocation of channel failed");
2481 pthread_mutex_unlock(&mMutex);
2482 return -ENOMEM;
2483 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002484 /* disable UBWC for preview, though supported,
2485 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002486 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002487 (previewSize.width == (int32_t)videoWidth)&&
2488 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002489 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002490 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002491 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002492 newStream->max_buffers = channel->getNumBuffers();
2493 newStream->priv = channel;
2494 }
2495 break;
2496 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2497 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2498 mChannelHandle,
2499 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002500 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002501 this,
2502 newStream,
2503 (cam_stream_type_t)
2504 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2505 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2506 mMetadataChannel);
2507 if (channel == NULL) {
2508 LOGE("allocation of YUV channel failed");
2509 pthread_mutex_unlock(&mMutex);
2510 return -ENOMEM;
2511 }
2512 newStream->max_buffers = channel->getNumBuffers();
2513 newStream->priv = channel;
2514 break;
2515 }
2516 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2517 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002518 case HAL_PIXEL_FORMAT_RAW10: {
2519 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2520 (HAL_DATASPACE_DEPTH != newStream->data_space))
2521 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 mRawChannel = new QCamera3RawChannel(
2523 mCameraHandle->camera_handle, mChannelHandle,
2524 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002525 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002526 this, newStream,
2527 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002528 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002529 if (mRawChannel == NULL) {
2530 LOGE("allocation of raw channel failed");
2531 pthread_mutex_unlock(&mMutex);
2532 return -ENOMEM;
2533 }
2534 newStream->max_buffers = mRawChannel->getNumBuffers();
2535 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2536 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002537 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002538 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002539 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2540 mDepthChannel = new QCamera3DepthChannel(
2541 mCameraHandle->camera_handle, mChannelHandle,
2542 mCameraHandle->ops, NULL, NULL, &padding_info,
2543 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2544 mMetadataChannel);
2545 if (NULL == mDepthChannel) {
2546 LOGE("Allocation of depth channel failed");
2547 pthread_mutex_unlock(&mMutex);
2548 return NO_MEMORY;
2549 }
2550 newStream->priv = mDepthChannel;
2551 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2552 } else {
2553 // Max live snapshot inflight buffer is 1. This is to mitigate
2554 // frame drop issues for video snapshot. The more buffers being
2555 // allocated, the more frame drops there are.
2556 mPictureChannel = new QCamera3PicChannel(
2557 mCameraHandle->camera_handle, mChannelHandle,
2558 mCameraHandle->ops, captureResultCb,
2559 setBufferErrorStatus, &padding_info, this, newStream,
2560 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2561 m_bIs4KVideo, isZsl, mMetadataChannel,
2562 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2563 if (mPictureChannel == NULL) {
2564 LOGE("allocation of channel failed");
2565 pthread_mutex_unlock(&mMutex);
2566 return -ENOMEM;
2567 }
2568 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2569 newStream->max_buffers = mPictureChannel->getNumBuffers();
2570 mPictureChannel->overrideYuvSize(
2571 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2572 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002573 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 break;
2575
2576 default:
2577 LOGE("not a supported format 0x%x", newStream->format);
2578 break;
2579 }
2580 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2581 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2582 } else {
2583 LOGE("Error, Unknown stream type");
2584 pthread_mutex_unlock(&mMutex);
2585 return -EINVAL;
2586 }
2587
2588 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002589 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2590 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002591 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002592 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2594 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2595 }
2596 }
2597
2598 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2599 it != mStreamInfo.end(); it++) {
2600 if ((*it)->stream == newStream) {
2601 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2602 break;
2603 }
2604 }
2605 } else {
2606 // Channel already exists for this stream
2607 // Do nothing for now
2608 }
2609 padding_info = gCamCapability[mCameraId]->padding_info;
2610
Emilian Peev7650c122017-01-19 08:24:33 -08002611        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002612 * since there is no real stream associated with it
2613 */
Emilian Peev7650c122017-01-19 08:24:33 -08002614 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002615 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2616 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002617 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002618 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 }
2620
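    // onlyRaw is honored only in the vendor RAW-only stream configuration mode;
    // in all other modes the analysis/support channels below are still considered.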
Thierry Strudel2896d122017-02-23 19:18:03 -08002621 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2622 onlyRaw = false;
2623 }
2624
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002625 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002626 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002627 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002628 cam_analysis_info_t analysisInfo;
2629 int32_t ret = NO_ERROR;
2630 ret = mCommon.getAnalysisInfo(
2631 FALSE,
2632 analysisFeatureMask,
2633 &analysisInfo);
2634 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002635 cam_color_filter_arrangement_t analysis_color_arrangement =
2636 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2637 CAM_FILTER_ARRANGEMENT_Y :
2638 gCamCapability[mCameraId]->color_arrangement);
2639 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2640 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002641 cam_dimension_t analysisDim;
2642 analysisDim = mCommon.getMatchingDimension(previewSize,
2643 analysisInfo.analysis_recommended_res);
2644
2645 mAnalysisChannel = new QCamera3SupportChannel(
2646 mCameraHandle->camera_handle,
2647 mChannelHandle,
2648 mCameraHandle->ops,
2649 &analysisInfo.analysis_padding_info,
2650 analysisFeatureMask,
2651 CAM_STREAM_TYPE_ANALYSIS,
2652 &analysisDim,
2653 (analysisInfo.analysis_format
2654 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2655 : CAM_FORMAT_YUV_420_NV21),
2656 analysisInfo.hw_analysis_supported,
2657 gCamCapability[mCameraId]->color_arrangement,
2658 this,
2659 0); // force buffer count to 0
2660 } else {
2661 LOGW("getAnalysisInfo failed, ret = %d", ret);
2662 }
2663 if (!mAnalysisChannel) {
2664 LOGW("Analysis channel cannot be created");
2665 }
2666 }
2667
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 //RAW DUMP channel
2669 if (mEnableRawDump && isRawStreamRequested == false){
2670 cam_dimension_t rawDumpSize;
2671 rawDumpSize = getMaxRawSize(mCameraId);
2672 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2673 setPAAFSupport(rawDumpFeatureMask,
2674 CAM_STREAM_TYPE_RAW,
2675 gCamCapability[mCameraId]->color_arrangement);
2676 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2677 mChannelHandle,
2678 mCameraHandle->ops,
2679 rawDumpSize,
2680 &padding_info,
2681 this, rawDumpFeatureMask);
2682 if (!mRawDumpChannel) {
2683 LOGE("Raw Dump channel cannot be created");
2684 pthread_mutex_unlock(&mMutex);
2685 return -ENOMEM;
2686 }
2687 }
2688
Thierry Strudel3d639192016-09-09 11:52:26 -07002689 if (mAnalysisChannel) {
2690 cam_analysis_info_t analysisInfo;
2691 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2692 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2693 CAM_STREAM_TYPE_ANALYSIS;
2694 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2695 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002696 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2698 &analysisInfo);
2699 if (rc != NO_ERROR) {
2700 LOGE("getAnalysisInfo failed, ret = %d", rc);
2701 pthread_mutex_unlock(&mMutex);
2702 return rc;
2703 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002704 cam_color_filter_arrangement_t analysis_color_arrangement =
2705 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2706 CAM_FILTER_ARRANGEMENT_Y :
2707 gCamCapability[mCameraId]->color_arrangement);
2708 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 analysis_color_arrangement);
2711
Thierry Strudel3d639192016-09-09 11:52:26 -07002712 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002713 mCommon.getMatchingDimension(previewSize,
2714 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002715 mStreamConfigInfo.num_streams++;
2716 }
2717
Thierry Strudel2896d122017-02-23 19:18:03 -08002718 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002719 cam_analysis_info_t supportInfo;
2720 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2721 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2722 setPAAFSupport(callbackFeatureMask,
2723 CAM_STREAM_TYPE_CALLBACK,
2724 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002725 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002726 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002727 if (ret != NO_ERROR) {
2728 /* Ignore the error for Mono camera
2729 * because the PAAF bit mask is only set
2730 * for CAM_STREAM_TYPE_ANALYSIS stream type
2731 */
2732 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2733 LOGW("getAnalysisInfo failed, ret = %d", ret);
2734 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002735 }
2736 mSupportChannel = new QCamera3SupportChannel(
2737 mCameraHandle->camera_handle,
2738 mChannelHandle,
2739 mCameraHandle->ops,
2740 &gCamCapability[mCameraId]->padding_info,
2741 callbackFeatureMask,
2742 CAM_STREAM_TYPE_CALLBACK,
2743 &QCamera3SupportChannel::kDim,
2744 CAM_FORMAT_YUV_420_NV21,
2745 supportInfo.hw_analysis_supported,
2746 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002747 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002748 if (!mSupportChannel) {
2749 LOGE("dummy channel cannot be created");
2750 pthread_mutex_unlock(&mMutex);
2751 return -ENOMEM;
2752 }
2753 }
2754
2755 if (mSupportChannel) {
2756 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2757 QCamera3SupportChannel::kDim;
2758 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2759 CAM_STREAM_TYPE_CALLBACK;
2760 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2761 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2762 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2763 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2764 gCamCapability[mCameraId]->color_arrangement);
2765 mStreamConfigInfo.num_streams++;
2766 }
2767
2768 if (mRawDumpChannel) {
2769 cam_dimension_t rawSize;
2770 rawSize = getMaxRawSize(mCameraId);
2771 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2772 rawSize;
2773 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2774 CAM_STREAM_TYPE_RAW;
2775 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2776 CAM_QCOM_FEATURE_NONE;
2777 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2778 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2779 gCamCapability[mCameraId]->color_arrangement);
2780 mStreamConfigInfo.num_streams++;
2781 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002782
2783 if (mHdrPlusRawSrcChannel) {
2784 cam_dimension_t rawSize;
2785 rawSize = getMaxRawSize(mCameraId);
2786 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2787 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2788 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2789 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2790 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2791 gCamCapability[mCameraId]->color_arrangement);
2792 mStreamConfigInfo.num_streams++;
2793 }
2794
Thierry Strudel3d639192016-09-09 11:52:26 -07002795 /* In HFR mode, if video stream is not added, create a dummy channel so that
2796 * ISP can create a batch mode even for preview only case. This channel is
2797 * never 'start'ed (no stream-on), it is only 'initialized' */
2798 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2799 !m_bIsVideo) {
2800 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2801 setPAAFSupport(dummyFeatureMask,
2802 CAM_STREAM_TYPE_VIDEO,
2803 gCamCapability[mCameraId]->color_arrangement);
2804 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2805 mChannelHandle,
2806 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002807 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002808 this,
2809 &mDummyBatchStream,
2810 CAM_STREAM_TYPE_VIDEO,
2811 dummyFeatureMask,
2812 mMetadataChannel);
2813 if (NULL == mDummyBatchChannel) {
2814 LOGE("creation of mDummyBatchChannel failed. "
2815 "Preview will use non-hfr sensor mode ");
2816 }
2817 }
2818 if (mDummyBatchChannel) {
2819 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2820 mDummyBatchStream.width;
2821 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2822 mDummyBatchStream.height;
2823 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2824 CAM_STREAM_TYPE_VIDEO;
2825 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2826 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2827 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2828 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2829 gCamCapability[mCameraId]->color_arrangement);
2830 mStreamConfigInfo.num_streams++;
2831 }
2832
2833 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2834 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 m_bIs4KVideo ? 0 :
2836 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002837
2838 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2839 for (pendingRequestIterator i = mPendingRequestsList.begin();
2840 i != mPendingRequestsList.end();) {
2841 i = erasePendingRequest(i);
2842 }
2843 mPendingFrameDropList.clear();
2844 // Initialize/Reset the pending buffers list
2845 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2846 req.mPendingBufferList.clear();
2847 }
2848 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2849
Thierry Strudel3d639192016-09-09 11:52:26 -07002850 mCurJpegMeta.clear();
2851 //Get min frame duration for this stream configuration
2852 deriveMinFrameDuration();
2853
Chien-Yu Chenee335912017-02-09 17:53:20 -08002854 mFirstPreviewIntentSeen = false;
2855
2856 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002857 {
2858 Mutex::Autolock l(gHdrPlusClientLock);
2859 disableHdrPlusModeLocked();
2860 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002861
Thierry Strudel3d639192016-09-09 11:52:26 -07002862 // Update state
2863 mState = CONFIGURED;
2864
2865 pthread_mutex_unlock(&mMutex);
2866
2867 return rc;
2868}
2869
2870/*===========================================================================
2871 * FUNCTION : validateCaptureRequest
2872 *
2873 * DESCRIPTION: validate a capture request from camera service
2874 *
2875 * PARAMETERS :
2876 * @request : request from framework to process
2877 *
2878 * RETURN :
2879 *
2880 *==========================================================================*/
2881int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002882 camera3_capture_request_t *request,
2883 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002884{
2885 ssize_t idx = 0;
2886 const camera3_stream_buffer_t *b;
2887 CameraMetadata meta;
2888
2889 /* Sanity check the request */
2890 if (request == NULL) {
2891 LOGE("NULL capture request");
2892 return BAD_VALUE;
2893 }
2894
2895 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2896 /*settings cannot be null for the first request*/
2897 return BAD_VALUE;
2898 }
2899
2900 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002901 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2902 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 LOGE("Request %d: No output buffers provided!",
2904 frameNumber);
2905 return BAD_VALUE;
2906 }
2907 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2908 LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
2909 request->num_output_buffers, MAX_NUM_STREAMS);
2910 return BAD_VALUE;
2911 }
2912 if (request->input_buffer != NULL) {
2913 b = request->input_buffer;
2914 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2915 LOGE("Request %d: Buffer %ld: Status not OK!",
2916 frameNumber, (long)idx);
2917 return BAD_VALUE;
2918 }
2919 if (b->release_fence != -1) {
2920 LOGE("Request %d: Buffer %ld: Has a release fence!",
2921 frameNumber, (long)idx);
2922 return BAD_VALUE;
2923 }
2924 if (b->buffer == NULL) {
2925 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2926 frameNumber, (long)idx);
2927 return BAD_VALUE;
2928 }
2929 }
2930
2931 // Validate all buffers
2932 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002933 if (b == NULL) {
2934 return BAD_VALUE;
2935 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002936 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002937 QCamera3ProcessingChannel *channel =
2938 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2939 if (channel == NULL) {
2940 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2941 frameNumber, (long)idx);
2942 return BAD_VALUE;
2943 }
2944 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2945 LOGE("Request %d: Buffer %ld: Status not OK!",
2946 frameNumber, (long)idx);
2947 return BAD_VALUE;
2948 }
2949 if (b->release_fence != -1) {
2950 LOGE("Request %d: Buffer %ld: Has a release fence!",
2951 frameNumber, (long)idx);
2952 return BAD_VALUE;
2953 }
2954 if (b->buffer == NULL) {
2955 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2956 frameNumber, (long)idx);
2957 return BAD_VALUE;
2958 }
2959 if (*(b->buffer) == NULL) {
2960 LOGE("Request %d: Buffer %ld: NULL private handle!",
2961 frameNumber, (long)idx);
2962 return BAD_VALUE;
2963 }
2964 idx++;
2965 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002966 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002967 return NO_ERROR;
2968}
2969
2970/*===========================================================================
2971 * FUNCTION : deriveMinFrameDuration
2972 *
2973 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2974 * on currently configured streams.
2975 *
2976 * PARAMETERS : NONE
2977 *
2978 * RETURN : NONE
2979 *
2980 *==========================================================================*/
2981void QCamera3HardwareInterface::deriveMinFrameDuration()
2982{
2983 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2984
2985 maxJpegDim = 0;
2986 maxProcessedDim = 0;
2987 maxRawDim = 0;
2988
2989 // Figure out maximum jpeg, processed, and raw dimensions
2990 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2991 it != mStreamInfo.end(); it++) {
2992
2993 // Input stream doesn't have valid stream_type
2994 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2995 continue;
2996
2997 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2998 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2999 if (dimension > maxJpegDim)
3000 maxJpegDim = dimension;
3001 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3002 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3003 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3004 if (dimension > maxRawDim)
3005 maxRawDim = dimension;
3006 } else {
3007 if (dimension > maxProcessedDim)
3008 maxProcessedDim = dimension;
3009 }
3010 }
3011
3012 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3013 MAX_SIZES_CNT);
3014
3015 //Assume all jpeg dimensions are in processed dimensions.
3016 if (maxJpegDim > maxProcessedDim)
3017 maxProcessedDim = maxJpegDim;
3018 //Find the smallest raw dimension that is greater or equal to jpeg dimension
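// Illustrative example (hypothetical sizes, not taken from a real capability table):
// with a 12MP processed/JPEG stream and raw sizes of 10MP and 16MP available,
// the search below picks the 16MP raw entry, and mMinRawFrameDuration is then
// taken from that entry's raw_min_duration in the loop that follows.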
3019 if (maxProcessedDim > maxRawDim) {
3020 maxRawDim = INT32_MAX;
3021
3022 for (size_t i = 0; i < count; i++) {
3023 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3024 gCamCapability[mCameraId]->raw_dim[i].height;
3025 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3026 maxRawDim = dimension;
3027 }
3028 }
3029
3030 //Find minimum durations for processed, jpeg, and raw
3031 for (size_t i = 0; i < count; i++) {
3032 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3033 gCamCapability[mCameraId]->raw_dim[i].height) {
3034 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3035 break;
3036 }
3037 }
3038 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3039 for (size_t i = 0; i < count; i++) {
3040 if (maxProcessedDim ==
3041 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3042 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3043 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3044 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3045 break;
3046 }
3047 }
3048}
3049
3050/*===========================================================================
3051 * FUNCTION : getMinFrameDuration
3052 *
3053 * DESCRIPTION: get minimum frame duration based on the minimum frame durations of
3054 * the currently configured streams and the current request configuration.
3055 *
3056 * PARAMETERS : @request: request sent by the framework
3057 *
3058 * RETURN : min frame duration for a particular request
3059 *
3060 *==========================================================================*/
3061int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3062{
3063 bool hasJpegStream = false;
3064 bool hasRawStream = false;
3065 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3066 const camera3_stream_t *stream = request->output_buffers[i].stream;
3067 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3068 hasJpegStream = true;
3069 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3070 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3071 stream->format == HAL_PIXEL_FORMAT_RAW16)
3072 hasRawStream = true;
3073 }
3074
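// Illustrative example (hypothetical request): a request containing only a
// processed (YUV) buffer returns MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
// if it also contains a BLOB (JPEG) buffer, mMinJpegFrameDuration is folded
// into the MAX as well.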
3075 if (!hasJpegStream)
3076 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3077 else
3078 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3079}
3080
3081/*===========================================================================
3082 * FUNCTION : handleBuffersDuringFlushLock
3083 *
3084 * DESCRIPTION: Account for buffers returned from back-end during flush
3085 * This function is executed while mMutex is held by the caller.
3086 *
3087 * PARAMETERS :
3088 * @buffer: image buffer for the callback
3089 *
3090 * RETURN :
3091 *==========================================================================*/
3092void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3093{
3094 bool buffer_found = false;
3095 for (List<PendingBuffersInRequest>::iterator req =
3096 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3097 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3098 for (List<PendingBufferInfo>::iterator i =
3099 req->mPendingBufferList.begin();
3100 i != req->mPendingBufferList.end(); i++) {
3101 if (i->buffer == buffer->buffer) {
3102 mPendingBuffersMap.numPendingBufsAtFlush--;
3103 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3104 buffer->buffer, req->frame_number,
3105 mPendingBuffersMap.numPendingBufsAtFlush);
3106 buffer_found = true;
3107 break;
3108 }
3109 }
3110 if (buffer_found) {
3111 break;
3112 }
3113 }
3114 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3115 //signal the flush()
3116 LOGD("All buffers returned to HAL. Continue flush");
3117 pthread_cond_signal(&mBuffersCond);
3118 }
3119}
3120
Thierry Strudel3d639192016-09-09 11:52:26 -07003121/*===========================================================================
3122 * FUNCTION : handleBatchMetadata
3123 *
3124 * DESCRIPTION: Handles metadata buffer callback in batch mode
3125 *
3126 * PARAMETERS : @metadata_buf: metadata buffer
3127 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3128 * the meta buf in this method
3129 *
3130 * RETURN :
3131 *
3132 *==========================================================================*/
3133void QCamera3HardwareInterface::handleBatchMetadata(
3134 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3135{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003136 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003137
3138 if (NULL == metadata_buf) {
3139 LOGE("metadata_buf is NULL");
3140 return;
3141 }
3142 /* In batch mode, the metadata will contain the frame number and timestamp of
3143 * the last frame in the batch. Eg: a batch containing buffers from request
3144 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3145 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3146 * multiple process_capture_results */
3147 metadata_buffer_t *metadata =
3148 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3149 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3150 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3151 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3152 uint32_t frame_number = 0, urgent_frame_number = 0;
3153 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3154 bool invalid_metadata = false;
3155 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3156 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003157 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003158
3159 int32_t *p_frame_number_valid =
3160 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3161 uint32_t *p_frame_number =
3162 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3163 int64_t *p_capture_time =
3164 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3165 int32_t *p_urgent_frame_number_valid =
3166 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3167 uint32_t *p_urgent_frame_number =
3168 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3169
3170 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3171 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3172 (NULL == p_urgent_frame_number)) {
3173 LOGE("Invalid metadata");
3174 invalid_metadata = true;
3175 } else {
3176 frame_number_valid = *p_frame_number_valid;
3177 last_frame_number = *p_frame_number;
3178 last_frame_capture_time = *p_capture_time;
3179 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3180 last_urgent_frame_number = *p_urgent_frame_number;
3181 }
3182
3183 /* In batch mode, when no video buffers are requested, set_parms are sent
3184 * for every capture_request. The difference between consecutive urgent
3185 * frame numbers and frame numbers should be used to interpolate the
3186 * corresponding frame numbers and time stamps */
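/* Worked example (illustrative numbers only, not tied to a particular device):
 * with first_frame_number = 5, last_frame_number = 8 and mHFRVideoFps = 120,
 * frameNumDiff is 4 and the loop below emits frame numbers 5..8, with
 * timestamps spaced NSEC_PER_SEC / 120 apart and the last one equal to
 * last_frame_capture_time. */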
3187 pthread_mutex_lock(&mMutex);
3188 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003189 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3190 if(idx < 0) {
3191 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3192 last_urgent_frame_number);
3193 mState = ERROR;
3194 pthread_mutex_unlock(&mMutex);
3195 return;
3196 }
3197 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003198 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3199 first_urgent_frame_number;
3200
3201 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3202 urgent_frame_number_valid,
3203 first_urgent_frame_number, last_urgent_frame_number);
3204 }
3205
3206 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003207 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3208 if(idx < 0) {
3209 LOGE("Invalid frame number received: %d. Irrecoverable error",
3210 last_frame_number);
3211 mState = ERROR;
3212 pthread_mutex_unlock(&mMutex);
3213 return;
3214 }
3215 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003216 frameNumDiff = last_frame_number + 1 -
3217 first_frame_number;
3218 mPendingBatchMap.removeItem(last_frame_number);
3219
3220 LOGD("frm: valid: %d frm_num: %d - %d",
3221 frame_number_valid,
3222 first_frame_number, last_frame_number);
3223
3224 }
3225 pthread_mutex_unlock(&mMutex);
3226
3227 if (urgent_frame_number_valid || frame_number_valid) {
3228 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3229 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3230 LOGE("urgentFrameNumDiff: %zu urgentFrameNum: %d",
3231 urgentFrameNumDiff, last_urgent_frame_number);
3232 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3233 LOGE("frameNumDiff: %zu frameNum: %d",
3234 frameNumDiff, last_frame_number);
3235 }
3236
3237 for (size_t i = 0; i < loopCount; i++) {
3238 /* handleMetadataWithLock is called even for invalid_metadata for
3239 * pipeline depth calculation */
3240 if (!invalid_metadata) {
3241 /* Infer frame number. Batch metadata contains frame number of the
3242 * last frame */
3243 if (urgent_frame_number_valid) {
3244 if (i < urgentFrameNumDiff) {
3245 urgent_frame_number =
3246 first_urgent_frame_number + i;
3247 LOGD("inferred urgent frame_number: %d",
3248 urgent_frame_number);
3249 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3250 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3251 } else {
3252 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3253 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3254 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3255 }
3256 }
3257
3258 /* Infer frame number. Batch metadata contains frame number of the
3259 * last frame */
3260 if (frame_number_valid) {
3261 if (i < frameNumDiff) {
3262 frame_number = first_frame_number + i;
3263 LOGD("inferred frame_number: %d", frame_number);
3264 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3265 CAM_INTF_META_FRAME_NUMBER, frame_number);
3266 } else {
3267 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3268 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3269 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3270 }
3271 }
3272
3273 if (last_frame_capture_time) {
3274 //Infer timestamp
3275 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003276 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003277 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003278 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003279 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3280 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3281 LOGD("batch capture_time: %lld, capture_time: %lld",
3282 last_frame_capture_time, capture_time);
3283 }
3284 }
3285 pthread_mutex_lock(&mMutex);
3286 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003287 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003288 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3289 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003290 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003291 pthread_mutex_unlock(&mMutex);
3292 }
3293
3294 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003295 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003296 mMetadataChannel->bufDone(metadata_buf);
3297 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003298 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003299 }
3300}
3301
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003302void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3303 camera3_error_msg_code_t errorCode)
3304{
3305 camera3_notify_msg_t notify_msg;
3306 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3307 notify_msg.type = CAMERA3_MSG_ERROR;
3308 notify_msg.message.error.error_code = errorCode;
3309 notify_msg.message.error.error_stream = NULL;
3310 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003311 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003312
3313 return;
3314}
Thierry Strudel3d639192016-09-09 11:52:26 -07003315/*===========================================================================
3316 * FUNCTION : handleMetadataWithLock
3317 *
3318 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3319 *
3320 * PARAMETERS : @metadata_buf: metadata buffer
3321 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3322 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003323 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3324 * last urgent metadata in a batch. Always true for non-batch mode
3325 * @lastMetadataInBatch: Boolean to indicate whether this is the
3326 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003327 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3328 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003329 *
3330 * RETURN :
3331 *
3332 *==========================================================================*/
3333void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003334 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003335 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3336 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003337{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003338 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003339 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3340 //during flush do not send metadata from this thread
3341 LOGD("not sending metadata during flush or when mState is error");
3342 if (free_and_bufdone_meta_buf) {
3343 mMetadataChannel->bufDone(metadata_buf);
3344 free(metadata_buf);
3345 }
3346 return;
3347 }
3348
3349 //not in flush
3350 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3351 int32_t frame_number_valid, urgent_frame_number_valid;
3352 uint32_t frame_number, urgent_frame_number;
3353 int64_t capture_time;
3354 nsecs_t currentSysTime;
3355
3356 int32_t *p_frame_number_valid =
3357 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3358 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3359 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3360 int32_t *p_urgent_frame_number_valid =
3361 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3362 uint32_t *p_urgent_frame_number =
3363 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3364 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3365 metadata) {
3366 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3367 *p_frame_number_valid, *p_frame_number);
3368 }
3369
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003370 camera_metadata_t *resultMetadata = nullptr;
3371
Thierry Strudel3d639192016-09-09 11:52:26 -07003372 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3373 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3374 LOGE("Invalid metadata");
3375 if (free_and_bufdone_meta_buf) {
3376 mMetadataChannel->bufDone(metadata_buf);
3377 free(metadata_buf);
3378 }
3379 goto done_metadata;
3380 }
3381 frame_number_valid = *p_frame_number_valid;
3382 frame_number = *p_frame_number;
3383 capture_time = *p_capture_time;
3384 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3385 urgent_frame_number = *p_urgent_frame_number;
3386 currentSysTime = systemTime(CLOCK_MONOTONIC);
3387
3388 // Detect if buffers from any requests are overdue
3389 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003390 int64_t timeout;
3391 {
3392 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3393 // If there is a pending HDR+ request, the following requests may be blocked until the
3394 // HDR+ request is done. So allow a longer timeout.
3395 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3396 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3397 }
3398
3399 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003400 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003401 assert(missed.stream->priv);
3402 if (missed.stream->priv) {
3403 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3404 assert(ch->mStreams[0]);
3405 if (ch->mStreams[0]) {
3406 LOGE("Cancel missing frame = %d, buffer = %p,"
3407 "stream type = %d, stream format = %d",
3408 req.frame_number, missed.buffer,
3409 ch->mStreams[0]->getMyType(), missed.stream->format);
3410 ch->timeoutFrame(req.frame_number);
3411 }
3412 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003413 }
3414 }
3415 }
3416 //Partial result on process_capture_result for timestamp
3417 if (urgent_frame_number_valid) {
3418 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3419 urgent_frame_number, capture_time);
3420
3421 //Received an urgent Frame Number, handle it
3422 //using partial results
3423 for (pendingRequestIterator i =
3424 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3425 LOGD("Iterator Frame = %d urgent frame = %d",
3426 i->frame_number, urgent_frame_number);
3427
3428 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3429 (i->partial_result_cnt == 0)) {
3430 LOGE("Error: HAL missed urgent metadata for frame number %d",
3431 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003432 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003433 }
3434
3435 if (i->frame_number == urgent_frame_number &&
3436 i->bUrgentReceived == 0) {
3437
3438 camera3_capture_result_t result;
3439 memset(&result, 0, sizeof(camera3_capture_result_t));
3440
3441 i->partial_result_cnt++;
3442 i->bUrgentReceived = 1;
3443 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003444 result.result = translateCbUrgentMetadataToResultMetadata(
3445 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003446 // Populate metadata result
3447 result.frame_number = urgent_frame_number;
3448 result.num_output_buffers = 0;
3449 result.output_buffers = NULL;
3450 result.partial_result = i->partial_result_cnt;
3451
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003452 {
3453 Mutex::Autolock l(gHdrPlusClientLock);
3454 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3455 // Notify HDR+ client about the partial metadata.
3456 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3457 result.partial_result == PARTIAL_RESULT_COUNT);
3458 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003459 }
3460
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003461 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 LOGD("urgent frame_number = %u, capture_time = %lld",
3463 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003464 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3465 // Instant AEC settled for this frame.
3466 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3467 mInstantAECSettledFrameNumber = urgent_frame_number;
3468 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003469 free_camera_metadata((camera_metadata_t *)result.result);
3470 break;
3471 }
3472 }
3473 }
3474
3475 if (!frame_number_valid) {
3476 LOGD("Not a valid normal frame number, used as SOF only");
3477 if (free_and_bufdone_meta_buf) {
3478 mMetadataChannel->bufDone(metadata_buf);
3479 free(metadata_buf);
3480 }
3481 goto done_metadata;
3482 }
3483 LOGH("valid frame_number = %u, capture_time = %lld",
3484 frame_number, capture_time);
3485
Emilian Peev7650c122017-01-19 08:24:33 -08003486 if (metadata->is_depth_data_valid) {
3487 handleDepthDataLocked(metadata->depth_data, frame_number);
3488 }
3489
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003490 // Check whether any stream buffer corresponding to this frame is dropped or not
3491 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3492 // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
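// Illustrative example (hypothetical numbers): if instant AEC settles at frame 12
// (mInstantAECSettledFrameNumber == 12), buffers belonging to pending requests with
// frame_number < 12 are reported below as CAMERA3_MSG_ERROR_BUFFER and tracked in
// mPendingFrameDropList.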
3493 for (auto & pendingRequest : mPendingRequestsList) {
3494 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3495 mInstantAECSettledFrameNumber)) {
3496 camera3_notify_msg_t notify_msg = {};
3497 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003498 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003499 QCamera3ProcessingChannel *channel =
3500 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003501 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003502 if (p_cam_frame_drop) {
3503 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003504 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003505 // Got the stream ID for drop frame.
3506 dropFrame = true;
3507 break;
3508 }
3509 }
3510 } else {
3511 // This is instant AEC case.
3512 // For instant AEC drop the stream until AEC is settled.
3513 dropFrame = true;
3514 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003515
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003516 if (dropFrame) {
3517 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3518 if (p_cam_frame_drop) {
3519 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003521 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003522 } else {
3523 // For instant AEC, inform frame drop and frame number
3524 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3525 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003526 pendingRequest.frame_number, streamID,
3527 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003528 }
3529 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003530 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003531 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003532 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003533 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003534 if (p_cam_frame_drop) {
3535 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003536 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003537 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003538 } else {
3539 // For instant AEC, inform frame drop and frame number
3540 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3541 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003542 pendingRequest.frame_number, streamID,
3543 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003544 }
3545 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003546 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003547 PendingFrameDrop.stream_ID = streamID;
3548 // Add the Frame drop info to mPendingFrameDropList
3549 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003550 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003551 }
3552 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003553 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003554
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003555 for (auto & pendingRequest : mPendingRequestsList) {
3556 // Find the pending request with the frame number.
3557 if (pendingRequest.frame_number == frame_number) {
3558 // Update the sensor timestamp.
3559 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003560
Thierry Strudel3d639192016-09-09 11:52:26 -07003561
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003562 /* Set the timestamp in display metadata so that clients aware of
3563 private_handle such as VT can use these unmodified timestamps.
3564 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003565 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003566
Thierry Strudel3d639192016-09-09 11:52:26 -07003567 // Find channel requiring metadata, meaning internal offline postprocess
3568 // is needed.
3569 //TODO: for now, we don't support two streams requiring metadata at the same time.
3570 // (because we are not making copies, and metadata buffer is not reference counted.)
3571 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003572 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3573 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003574 if (iter->need_metadata) {
3575 internalPproc = true;
3576 QCamera3ProcessingChannel *channel =
3577 (QCamera3ProcessingChannel *)iter->stream->priv;
3578 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003579 if(p_is_metabuf_queued != NULL) {
3580 *p_is_metabuf_queued = true;
3581 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003582 break;
3583 }
3584 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003585 for (auto itr = pendingRequest.internalRequestList.begin();
3586 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003587 if (itr->need_metadata) {
3588 internalPproc = true;
3589 QCamera3ProcessingChannel *channel =
3590 (QCamera3ProcessingChannel *)itr->stream->priv;
3591 channel->queueReprocMetadata(metadata_buf);
3592 break;
3593 }
3594 }
3595
Thierry Strudel54dc9782017-02-15 12:12:10 -08003596 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003597 resultMetadata = translateFromHalMetadata(metadata,
3598 pendingRequest.timestamp, pendingRequest.request_id,
3599 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3600 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003601 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003602 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003603 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003604 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003605 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003606 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003607
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003608 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003609
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003610 if (pendingRequest.blob_request) {
3611 //Dump tuning metadata if enabled and available
3612 char prop[PROPERTY_VALUE_MAX];
3613 memset(prop, 0, sizeof(prop));
3614 property_get("persist.camera.dumpmetadata", prop, "0");
3615 int32_t enabled = atoi(prop);
3616 if (enabled && metadata->is_tuning_params_valid) {
3617 dumpMetadataToFile(metadata->tuning_params,
3618 mMetaFrameCount,
3619 enabled,
3620 "Snapshot",
3621 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003622 }
3623 }
3624
3625 if (!internalPproc) {
3626 LOGD("couldn't find need_metadata for this metadata");
3627 // Return metadata buffer
3628 if (free_and_bufdone_meta_buf) {
3629 mMetadataChannel->bufDone(metadata_buf);
3630 free(metadata_buf);
3631 }
3632 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003633
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003634 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003635 }
3636 }
3637
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003638 // Try to send out shutter callbacks and capture results.
3639 handlePendingResultsWithLock(frame_number, resultMetadata);
3640 return;
3641
Thierry Strudel3d639192016-09-09 11:52:26 -07003642done_metadata:
3643 for (pendingRequestIterator i = mPendingRequestsList.begin();
3644 i != mPendingRequestsList.end() ;i++) {
3645 i->pipeline_depth++;
3646 }
3647 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3648 unblockRequestIfNecessary();
3649}
3650
3651/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003652 * FUNCTION : handleDepthDataLocked
3653 *
3654 * DESCRIPTION: Handles incoming depth data
3655 *
3656 * PARAMETERS : @depthData : Depth data
3657 * @frameNumber: Frame number of the incoming depth data
3658 *
3659 * RETURN :
3660 *
3661 *==========================================================================*/
3662void QCamera3HardwareInterface::handleDepthDataLocked(
3663 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3664 uint32_t currentFrameNumber;
3665 buffer_handle_t *depthBuffer;
3666
3667 if (nullptr == mDepthChannel) {
3668 LOGE("Depth channel not present!");
3669 return;
3670 }
3671
3672 camera3_stream_buffer_t resultBuffer =
3673 {.acquire_fence = -1,
3674 .release_fence = -1,
3675 .status = CAMERA3_BUFFER_STATUS_OK,
3676 .buffer = nullptr,
3677 .stream = mDepthChannel->getStream()};
3678 camera3_capture_result_t result =
3679 {.result = nullptr,
3680 .num_output_buffers = 1,
3681 .output_buffers = &resultBuffer,
3682 .partial_result = 0,
3683 .frame_number = 0};
3684
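// Example walk-through (hypothetical frame numbers): if depth buffers for frames
// 3, 4 and 5 are queued and depth data arrives for frame 5, the loop below returns
// frames 3 and 4 with CAMERA3_BUFFER_STATUS_ERROR and frame 5 populated with the
// data and CAMERA3_BUFFER_STATUS_OK.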
3685 do {
3686 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3687 if (nullptr == depthBuffer) {
3688 break;
3689 }
3690
3691 result.frame_number = currentFrameNumber;
3692 resultBuffer.buffer = depthBuffer;
3693 if (currentFrameNumber == frameNumber) {
3694 int32_t rc = mDepthChannel->populateDepthData(depthData,
3695 frameNumber);
3696 if (NO_ERROR != rc) {
3697 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3698 } else {
3699 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3700 }
3701 } else if (currentFrameNumber > frameNumber) {
3702 break;
3703 } else {
3704 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3705 {{currentFrameNumber, mDepthChannel->getStream(),
3706 CAMERA3_MSG_ERROR_BUFFER}}};
3707 orchestrateNotify(&notify_msg);
3708
3709 LOGE("Depth buffer for frame number: %d is missing, "
3710 "returning it with error status!", currentFrameNumber);
3711 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3712 }
3713 mDepthChannel->unmapBuffer(currentFrameNumber);
3714
3715 orchestrateResult(&result);
3716 } while (currentFrameNumber < frameNumber);
3717}
3718
3719/*===========================================================================
3720 * FUNCTION : notifyErrorFoPendingDepthData
3721 *
3722 * DESCRIPTION: Returns error for any pending depth buffers
3723 *
3724 * PARAMETERS : depthCh - depth channel that needs to get flushed
3725 *
3726 * RETURN :
3727 *
3728 *==========================================================================*/
3729void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3730 QCamera3DepthChannel *depthCh) {
3731 uint32_t currentFrameNumber;
3732 buffer_handle_t *depthBuffer;
3733
3734 if (nullptr == depthCh) {
3735 return;
3736 }
3737
3738 camera3_notify_msg_t notify_msg =
3739 {.type = CAMERA3_MSG_ERROR,
3740 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3741 camera3_stream_buffer_t resultBuffer =
3742 {.acquire_fence = -1,
3743 .release_fence = -1,
3744 .buffer = nullptr,
3745 .stream = depthCh->getStream(),
3746 .status = CAMERA3_BUFFER_STATUS_ERROR};
3747 camera3_capture_result_t result =
3748 {.result = nullptr,
3749 .frame_number = 0,
3750 .num_output_buffers = 1,
3751 .partial_result = 0,
3752 .output_buffers = &resultBuffer};
3753
3754 while (nullptr !=
3755 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3756 depthCh->unmapBuffer(currentFrameNumber);
3757
3758 notify_msg.message.error.frame_number = currentFrameNumber;
3759 orchestrateNotify(&notify_msg);
3760
3761 resultBuffer.buffer = depthBuffer;
3762 result.frame_number = currentFrameNumber;
3763 orchestrateResult(&result);
3764 };
3765}
3766
3767/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003768 * FUNCTION : hdrPlusPerfLock
3769 *
3770 * DESCRIPTION: perf lock for HDR+ using custom intent
3771 *
3772 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3773 *
3774 * RETURN : None
3775 *
3776 *==========================================================================*/
3777void QCamera3HardwareInterface::hdrPlusPerfLock(
3778 mm_camera_super_buf_t *metadata_buf)
3779{
3780 if (NULL == metadata_buf) {
3781 LOGE("metadata_buf is NULL");
3782 return;
3783 }
3784 metadata_buffer_t *metadata =
3785 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3786 int32_t *p_frame_number_valid =
3787 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3788 uint32_t *p_frame_number =
3789 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3790
3791 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3792 LOGE("%s: Invalid metadata", __func__);
3793 return;
3794 }
3795
3796 //acquire perf lock for 5 sec after the last HDR frame is captured
3797 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3798 if ((p_frame_number != NULL) &&
3799 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003800 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003801 }
3802 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003803}
3804
3805/*===========================================================================
3806 * FUNCTION : handleInputBufferWithLock
3807 *
3808 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3809 *
3810 * PARAMETERS : @frame_number: frame number of the input buffer
3811 *
3812 * RETURN :
3813 *
3814 *==========================================================================*/
3815void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3816{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003817 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 pendingRequestIterator i = mPendingRequestsList.begin();
3819 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3820 i++;
3821 }
3822 if (i != mPendingRequestsList.end() && i->input_buffer) {
3823 //found the right request
3824 if (!i->shutter_notified) {
3825 CameraMetadata settings;
3826 camera3_notify_msg_t notify_msg;
3827 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3828 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3829 if(i->settings) {
3830 settings = i->settings;
3831 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3832 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3833 } else {
3834 LOGE("No timestamp in input settings! Using current one.");
3835 }
3836 } else {
3837 LOGE("Input settings missing!");
3838 }
3839
3840 notify_msg.type = CAMERA3_MSG_SHUTTER;
3841 notify_msg.message.shutter.frame_number = frame_number;
3842 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003843 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003844 i->shutter_notified = true;
3845 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3846 i->frame_number, notify_msg.message.shutter.timestamp);
3847 }
3848
3849 if (i->input_buffer->release_fence != -1) {
3850 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3851 close(i->input_buffer->release_fence);
3852 if (rc != OK) {
3853 LOGE("input buffer sync wait failed %d", rc);
3854 }
3855 }
3856
3857 camera3_capture_result result;
3858 memset(&result, 0, sizeof(camera3_capture_result));
3859 result.frame_number = frame_number;
3860 result.result = i->settings;
3861 result.input_buffer = i->input_buffer;
3862 result.partial_result = PARTIAL_RESULT_COUNT;
3863
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003864 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003865 LOGD("Input request metadata and input buffer frame_number = %u",
3866 i->frame_number);
3867 i = erasePendingRequest(i);
3868 } else {
3869 LOGE("Could not find input request for frame number %d", frame_number);
3870 }
3871}
3872
3873/*===========================================================================
3874 * FUNCTION : handleBufferWithLock
3875 *
3876 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3877 *
3878 * PARAMETERS : @buffer: image buffer for the callback
3879 * @frame_number: frame number of the image buffer
3880 *
3881 * RETURN :
3882 *
3883 *==========================================================================*/
3884void QCamera3HardwareInterface::handleBufferWithLock(
3885 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3886{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003887 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003888
3889 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3890 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3891 }
3892
Thierry Strudel3d639192016-09-09 11:52:26 -07003893 /* Nothing to be done during error state */
3894 if ((ERROR == mState) || (DEINIT == mState)) {
3895 return;
3896 }
3897 if (mFlushPerf) {
3898 handleBuffersDuringFlushLock(buffer);
3899 return;
3900 }
3901 //not in flush
3902 // If the frame number doesn't exist in the pending request list,
3903 // directly send the buffer to the frameworks, and update pending buffers map
3904 // Otherwise, book-keep the buffer.
3905 pendingRequestIterator i = mPendingRequestsList.begin();
3906 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3907 i++;
3908 }
3909 if (i == mPendingRequestsList.end()) {
3910 // Verify all pending requests frame_numbers are greater
3911 for (pendingRequestIterator j = mPendingRequestsList.begin();
3912 j != mPendingRequestsList.end(); j++) {
3913 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3914 LOGW("Error: pending live frame number %d is smaller than %d",
3915 j->frame_number, frame_number);
3916 }
3917 }
3918 camera3_capture_result_t result;
3919 memset(&result, 0, sizeof(camera3_capture_result_t));
3920 result.result = NULL;
3921 result.frame_number = frame_number;
3922 result.num_output_buffers = 1;
3923 result.partial_result = 0;
3924 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3925 m != mPendingFrameDropList.end(); m++) {
3926 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3927 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3928 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3929 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3930 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3931 frame_number, streamID);
3932 m = mPendingFrameDropList.erase(m);
3933 break;
3934 }
3935 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003936 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003937 result.output_buffers = buffer;
3938 LOGH("result frame_number = %d, buffer = %p",
3939 frame_number, buffer->buffer);
3940
3941 mPendingBuffersMap.removeBuf(buffer->buffer);
3942
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003943 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003944 } else {
3945 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003946 if (i->input_buffer->release_fence != -1) {
3947 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3948 close(i->input_buffer->release_fence);
3949 if (rc != OK) {
3950 LOGE("input buffer sync wait failed %d", rc);
3951 }
3952 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003953 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003954
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003955 // Put buffer into the pending request
3956 for (auto &requestedBuffer : i->buffers) {
3957 if (requestedBuffer.stream == buffer->stream) {
3958 if (requestedBuffer.buffer != nullptr) {
3959 LOGE("Error: buffer is already set");
3960 } else {
3961 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3962 sizeof(camera3_stream_buffer_t));
3963 *(requestedBuffer.buffer) = *buffer;
3964 LOGH("cache buffer %p at result frame_number %u",
3965 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003966 }
3967 }
3968 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003969
3970 if (i->input_buffer) {
3971 // For a reprocessing request, try to send out shutter callback and result metadata.
3972 handlePendingResultsWithLock(frame_number, nullptr);
3973 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003974 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003975
3976 if (mPreviewStarted == false) {
3977 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3978 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3979 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3980 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3981 mPreviewStarted = true;
3982
3983 // Set power hint for preview
3984 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3985 }
3986 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003987}
3988
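/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Attach the result metadata to the pending request with the
 *              given frame number, then walk the pending request list in
 *              order, sending shutter callbacks and capture results that are
 *              ready and notifying ERROR_RESULT for earlier live requests
 *              that never received metadata. Called with mMutex held.
 *
 * PARAMETERS :
 *   @frameNumber    : frame number the result metadata belongs to
 *   @resultMetadata : result metadata for the request (may be nullptr)
 *
 * RETURN     :
 *
 *==========================================================================*/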
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003989void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3990 const camera_metadata_t *resultMetadata)
3991{
3992 // Find the pending request for this result metadata.
3993 auto requestIter = mPendingRequestsList.begin();
3994 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3995 requestIter++;
3996 }
3997
3998 if (requestIter == mPendingRequestsList.end()) {
3999 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4000 return;
4001 }
4002
4003 // Update the result metadata
4004 requestIter->resultMetadata = resultMetadata;
4005
4006 // Check what type of request this is.
4007 bool liveRequest = false;
4008 if (requestIter->hdrplus) {
4009 // HDR+ request doesn't have partial results.
4010 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4011 } else if (requestIter->input_buffer != nullptr) {
4012 // Reprocessing request result is the same as settings.
4013 requestIter->resultMetadata = requestIter->settings;
4014 // Reprocessing request doesn't have partial results.
4015 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4016 } else {
4017 liveRequest = true;
4018 requestIter->partial_result_cnt++;
4019 mPendingLiveRequest--;
4020
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004021 {
4022 Mutex::Autolock l(gHdrPlusClientLock);
4023 // For a live request, send the metadata to HDR+ client.
4024 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4025 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4026 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4027 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004028 }
4029 }
4030
4031 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4032 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4033 bool readyToSend = true;
4034
4035 // Iterate through the pending requests to send out shutter callbacks and results that are
4036 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4037 // live requests that don't have result metadata yet.
4038 auto iter = mPendingRequestsList.begin();
4039 while (iter != mPendingRequestsList.end()) {
4040 // Check if current pending request is ready. If it's not ready, the following pending
4041 // requests are also not ready.
4042 if (readyToSend && iter->resultMetadata == nullptr) {
4043 readyToSend = false;
4044 }
4045
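 // A live request is one that is neither an HDR+ request nor a reprocess
 // (input buffer) request.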
4046 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4047
4048 std::vector<camera3_stream_buffer_t> outputBuffers;
4049
4050 camera3_capture_result_t result = {};
4051 result.frame_number = iter->frame_number;
4052 result.result = iter->resultMetadata;
4053 result.partial_result = iter->partial_result_cnt;
4054
4055 // If this pending buffer has result metadata, we may be able to send out shutter callback
4056 // and result metadata.
4057 if (iter->resultMetadata != nullptr) {
4058 if (!readyToSend) {
4059 // If any of the previous pending request is not ready, this pending request is
4060 // also not ready to send in order to keep shutter callbacks and result metadata
4061 // in order.
4062 iter++;
4063 continue;
4064 }
4065
4066 // Invoke shutter callback if not yet.
4067 if (!iter->shutter_notified) {
4068 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4069
4070 // Find the sensor timestamp in the result metadata
4071 camera_metadata_ro_entry_t entry;
4072 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4073 ANDROID_SENSOR_TIMESTAMP, &entry);
4074 if (res != OK) {
4075 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4076 __FUNCTION__, iter->frame_number, strerror(-res), res);
4077 } else {
4078 timestamp = entry.data.i64[0];
4079 }
4080
4081 camera3_notify_msg_t notify_msg = {};
4082 notify_msg.type = CAMERA3_MSG_SHUTTER;
4083 notify_msg.message.shutter.frame_number = iter->frame_number;
4084 notify_msg.message.shutter.timestamp = timestamp;
4085 orchestrateNotify(&notify_msg);
4086 iter->shutter_notified = true;
4087 }
4088
4089 result.input_buffer = iter->input_buffer;
4090
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004091 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4092 // If the result metadata belongs to a live request, notify errors for previous pending
4093 // live requests.
4094 mPendingLiveRequest--;
4095
4096 CameraMetadata dummyMetadata;
4097 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4098 result.result = dummyMetadata.release();
4099
4100 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004101
4102 // partial_result should be PARTIAL_RESULT_CNT in case of
4103 // ERROR_RESULT.
4104 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4105 result.partial_result = PARTIAL_RESULT_COUNT;
4106
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004107 } else {
4108 iter++;
4109 continue;
4110 }
4111
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004112 // Prepare output buffer array
4113 for (auto bufferInfoIter = iter->buffers.begin();
4114 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4115 if (bufferInfoIter->buffer != nullptr) {
4116
4117 QCamera3Channel *channel =
4118 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4119 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4120
4121 // Check if this buffer is a dropped frame.
4122 auto frameDropIter = mPendingFrameDropList.begin();
4123 while (frameDropIter != mPendingFrameDropList.end()) {
4124 if((frameDropIter->stream_ID == streamID) &&
4125 (frameDropIter->frame_number == frameNumber)) {
4126 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4127 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4128 streamID);
4129 mPendingFrameDropList.erase(frameDropIter);
4130 break;
4131 } else {
4132 frameDropIter++;
4133 }
4134 }
4135
4136 // Check buffer error status
4137 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4138 bufferInfoIter->buffer->buffer);
4139 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4140
4141 outputBuffers.push_back(*(bufferInfoIter->buffer));
4142 free(bufferInfoIter->buffer);
4143 bufferInfoIter->buffer = NULL;
4144 }
4145 }
4146
4147 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4148 result.num_output_buffers = outputBuffers.size();
4149
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004150 orchestrateResult(&result);
4151
4152 // For reprocessing, result metadata is the same as settings so do not free it here to
4153 // avoid double free.
4154 if (result.result != iter->settings) {
4155 free_camera_metadata((camera_metadata_t *)result.result);
4156 }
4157 iter->resultMetadata = nullptr;
4158 iter = erasePendingRequest(iter);
4159 }
4160
4161 if (liveRequest) {
4162 for (auto &iter : mPendingRequestsList) {
4163 // Increment pipeline depth for the following pending requests.
4164 if (iter.frame_number > frameNumber) {
4165 iter.pipeline_depth++;
4166 }
4167 }
4168 }
4169
4170 unblockRequestIfNecessary();
4171}
4172
Thierry Strudel3d639192016-09-09 11:52:26 -07004173/*===========================================================================
4174 * FUNCTION : unblockRequestIfNecessary
4175 *
4176 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4177 * that mMutex is held when this function is called.
4178 *
4179 * PARAMETERS :
4180 *
4181 * RETURN :
4182 *
4183 *==========================================================================*/
4184void QCamera3HardwareInterface::unblockRequestIfNecessary()
4185{
4186 // Unblock process_capture_request
4187 pthread_cond_signal(&mRequestCond);
4188}
4189
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004190/*===========================================================================
4191 * FUNCTION : isHdrSnapshotRequest
4192 *
4193 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4194 *
4195 * PARAMETERS : camera3 request structure
4196 *
4197 * RETURN : boolean decision variable
4198 *
4199 *==========================================================================*/
4200bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4201{
4202 if (request == NULL) {
4203 LOGE("Invalid request handle");
4204 assert(0);
4205 return false;
4206 }
4207
4208 if (!mForceHdrSnapshot) {
4209 CameraMetadata frame_settings;
4210 frame_settings = request->settings;
4211
4212 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4213 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4214 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4215 return false;
4216 }
4217 } else {
4218 return false;
4219 }
4220
4221 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4222 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4223 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4224 return false;
4225 }
4226 } else {
4227 return false;
4228 }
4229 }
4230
4231 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4232 if (request->output_buffers[i].stream->format
4233 == HAL_PIXEL_FORMAT_BLOB) {
4234 return true;
4235 }
4236 }
4237
4238 return false;
4239}
4240/*===========================================================================
4241 * FUNCTION : orchestrateRequest
4242 *
4243 * DESCRIPTION: Orchestrates a capture request from camera service
4244 *
4245 * PARAMETERS :
4246 * @request : request from framework to process
4247 *
4248 * RETURN : Error status codes
4249 *
4250 *==========================================================================*/
4251int32_t QCamera3HardwareInterface::orchestrateRequest(
4252 camera3_capture_request_t *request)
4253{
4254
4255 uint32_t originalFrameNumber = request->frame_number;
4256 uint32_t originalOutputCount = request->num_output_buffers;
4257 const camera_metadata_t *original_settings = request->settings;
4258 List<InternalRequest> internallyRequestedStreams;
4259 List<InternalRequest> emptyInternalList;
4260
4261 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4262 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
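 /* Expand the single framework HDR snapshot request into a bracketed set of
  * internal requests at different AE exposure compensation levels (AE locked),
  * interleaving metering-only settling captures with captures that need
  * metadata. Only the request mapped to originalFrameNumber is returned to the
  * framework; the internal frame numbers are dropped in orchestrateResult. */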
4263 uint32_t internalFrameNumber;
4264 CameraMetadata modified_meta;
4265
4266
4267 /* Add Blob channel to list of internally requested streams */
4268 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4269 if (request->output_buffers[i].stream->format
4270 == HAL_PIXEL_FORMAT_BLOB) {
4271 InternalRequest streamRequested;
4272 streamRequested.meteringOnly = 1;
4273 streamRequested.need_metadata = 0;
4274 streamRequested.stream = request->output_buffers[i].stream;
4275 internallyRequestedStreams.push_back(streamRequested);
4276 }
4277 }
4278 request->num_output_buffers = 0;
4279 auto itr = internallyRequestedStreams.begin();
4280
4281 /* Modify setting to set compensation */
4282 modified_meta = request->settings;
4283 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4284 uint8_t aeLock = 1;
4285 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4286 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4287 camera_metadata_t *modified_settings = modified_meta.release();
4288 request->settings = modified_settings;
4289
4290 /* Capture Settling & -2x frame */
4291 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4292 request->frame_number = internalFrameNumber;
4293 processCaptureRequest(request, internallyRequestedStreams);
4294
4295 request->num_output_buffers = originalOutputCount;
4296 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4297 request->frame_number = internalFrameNumber;
4298 processCaptureRequest(request, emptyInternalList);
4299 request->num_output_buffers = 0;
4300
4301 modified_meta = modified_settings;
4302 expCompensation = 0;
4303 aeLock = 1;
4304 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4305 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4306 modified_settings = modified_meta.release();
4307 request->settings = modified_settings;
4308
4309 /* Capture Settling & 0X frame */
4310
4311 itr = internallyRequestedStreams.begin();
4312 if (itr == internallyRequestedStreams.end()) {
4313 LOGE("Error Internally Requested Stream list is empty");
4314 assert(0);
4315 } else {
4316 itr->need_metadata = 0;
4317 itr->meteringOnly = 1;
4318 }
4319
4320 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4321 request->frame_number = internalFrameNumber;
4322 processCaptureRequest(request, internallyRequestedStreams);
4323
4324 itr = internallyRequestedStreams.begin();
4325 if (itr == internallyRequestedStreams.end()) {
4326 ALOGE("Error Internally Requested Stream list is empty");
4327 assert(0);
4328 } else {
4329 itr->need_metadata = 1;
4330 itr->meteringOnly = 0;
4331 }
4332
4333 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4334 request->frame_number = internalFrameNumber;
4335 processCaptureRequest(request, internallyRequestedStreams);
4336
4337 /* Capture 2X frame*/
4338 modified_meta = modified_settings;
4339 expCompensation = GB_HDR_2X_STEP_EV;
4340 aeLock = 1;
4341 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4342 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4343 modified_settings = modified_meta.release();
4344 request->settings = modified_settings;
4345
4346 itr = internallyRequestedStreams.begin();
4347 if (itr == internallyRequestedStreams.end()) {
4348 ALOGE("Error Internally Requested Stream list is empty");
4349 assert(0);
4350 } else {
4351 itr->need_metadata = 0;
4352 itr->meteringOnly = 1;
4353 }
4354 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4355 request->frame_number = internalFrameNumber;
4356 processCaptureRequest(request, internallyRequestedStreams);
4357
4358 itr = internallyRequestedStreams.begin();
4359 if (itr == internallyRequestedStreams.end()) {
4360 ALOGE("Error Internally Requested Stream list is empty");
4361 assert(0);
4362 } else {
4363 itr->need_metadata = 1;
4364 itr->meteringOnly = 0;
4365 }
4366
4367 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4368 request->frame_number = internalFrameNumber;
4369 processCaptureRequest(request, internallyRequestedStreams);
4370
4371
4372 /* Capture 2X on original streaming config*/
4373 internallyRequestedStreams.clear();
4374
4375 /* Restore original settings pointer */
4376 request->settings = original_settings;
4377 } else {
4378 uint32_t internalFrameNumber;
4379 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4380 request->frame_number = internalFrameNumber;
4381 return processCaptureRequest(request, internallyRequestedStreams);
4382 }
4383
4384 return NO_ERROR;
4385}
4386
4387/*===========================================================================
4388 * FUNCTION : orchestrateResult
4389 *
4390 * DESCRIPTION: Orchestrates a capture result to camera service
4391 *
4392 * PARAMETERS :
4393 * @result : capture result to send to camera service
4394 *
4395 * RETURN :
4396 *
4397 *==========================================================================*/
4398void QCamera3HardwareInterface::orchestrateResult(
4399 camera3_capture_result_t *result)
4400{
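 // Results arrive tagged with internal frame numbers; translate back to the
 // framework frame number and silently drop results that belong to
 // internal-only requests (mapped to EMPTY_FRAMEWORK_FRAME_NUMBER).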
4401 uint32_t frameworkFrameNumber;
4402 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4403 frameworkFrameNumber);
4404 if (rc != NO_ERROR) {
4405 LOGE("Cannot find translated frameworkFrameNumber");
4406 assert(0);
4407 } else {
4408 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004409 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004410 } else {
4411 result->frame_number = frameworkFrameNumber;
4412 mCallbackOps->process_capture_result(mCallbackOps, result);
4413 }
4414 }
4415}
4416
4417/*===========================================================================
4418 * FUNCTION : orchestrateNotify
4419 *
4420 * DESCRIPTION: Orchestrates a notify to camera service
4421 *
4422 * PARAMETERS :
4423 * @notify_msg : notify message to send to camera service
4424 *
4425 * RETURN :
4426 *
4427 *==========================================================================*/
4428void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4429{
4430 uint32_t frameworkFrameNumber;
4431 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004432 int32_t rc = NO_ERROR;
4433
4434 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004435 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004436
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004437 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004438 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4439 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4440 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004441 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004442 LOGE("Cannot find translated frameworkFrameNumber");
4443 assert(0);
4444 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004445 }
4446 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004447
4448 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4449 LOGD("Internal Request drop the notifyCb");
4450 } else {
4451 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4452 mCallbackOps->notify(mCallbackOps, notify_msg);
4453 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004454}
4455
4456/*===========================================================================
4457 * FUNCTION : FrameNumberRegistry
4458 *
4459 * DESCRIPTION: Constructor
4460 *
4461 * PARAMETERS :
4462 *
4463 * RETURN :
4464 *
4465 *==========================================================================*/
4466FrameNumberRegistry::FrameNumberRegistry()
4467{
4468 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4469}
4470
4471/*===========================================================================
4472 * FUNCTION : ~FrameNumberRegistry
4473 *
4474 * DESCRIPTION: Destructor
4475 *
4476 * PARAMETERS :
4477 *
4478 * RETURN :
4479 *
4480 *==========================================================================*/
4481FrameNumberRegistry::~FrameNumberRegistry()
4482{
4483}
4484
4485/*===========================================================================
4486 * FUNCTION : PurgeOldEntriesLocked
4487 *
4488 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4489 *
4490 * PARAMETERS :
4491 *
4492 * RETURN : NONE
4493 *
4494 *==========================================================================*/
4495void FrameNumberRegistry::purgeOldEntriesLocked()
4496{
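 // Drop entries whose internal frame number has fallen out of the LRU window,
 // i.e. is older than (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE).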
4497 while (_register.begin() != _register.end()) {
4498 auto itr = _register.begin();
4499 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4500 _register.erase(itr);
4501 } else {
4502 return;
4503 }
4504 }
4505}
4506
4507/*===========================================================================
4508 * FUNCTION : allocStoreInternalFrameNumber
4509 *
4510 * DESCRIPTION: Method to note down a framework request and associate a new
4511 * internal request number against it
4512 *
4513 * PARAMETERS :
4514 * @fFrameNumber: Identifier given by framework
4515 * @internalFN : Output parameter which will have the newly generated internal
4516 * entry
4517 *
4518 * RETURN : Error code
4519 *
4520 *==========================================================================*/
4521int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4522 uint32_t &internalFrameNumber)
4523{
4524 Mutex::Autolock lock(mRegistryLock);
4525 internalFrameNumber = _nextFreeInternalNumber++;
4526 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4527 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4528 purgeOldEntriesLocked();
4529 return NO_ERROR;
4530}
4531
4532/*===========================================================================
4533 * FUNCTION : generateStoreInternalFrameNumber
4534 *
4535 * DESCRIPTION: Method to associate a new internal request number independent
4536 * of any association with framework requests
4537 *
4538 * PARAMETERS :
4539 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4540 *
4541 *
4542 * RETURN : Error code
4543 *
4544 *==========================================================================*/
4545int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4546{
4547 Mutex::Autolock lock(mRegistryLock);
4548 internalFrameNumber = _nextFreeInternalNumber++;
4549 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4550 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4551 purgeOldEntriesLocked();
4552 return NO_ERROR;
4553}
4554
4555/*===========================================================================
4556 * FUNCTION : getFrameworkFrameNumber
4557 *
4558 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4559 *
4560 * PARAMETERS :
4561 * @internalFrame#: Internal reference
4562 * @frameworkFrameNumber: Output parameter holding the framework frame number
4563 *
4564 * RETURN : Error code
4565 *
4566 *==========================================================================*/
4567int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4568 uint32_t &frameworkFrameNumber)
4569{
4570 Mutex::Autolock lock(mRegistryLock);
4571 auto itr = _register.find(internalFrameNumber);
4572 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004573 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004574 return -ENOENT;
4575 }
4576
4577 frameworkFrameNumber = itr->second;
4578 purgeOldEntriesLocked();
4579 return NO_ERROR;
4580}
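/* Illustrative use of FrameNumberRegistry (see orchestrateRequest/orchestrateResult):
 *   uint32_t internalFrame, fwkFrame;
 *   _orchestrationDb.allocStoreInternalFrameNumber(fwkFrameNumber, internalFrame); // framework-visible request
 *   _orchestrationDb.generateStoreInternalFrameNumber(internalFrame);              // internal-only request
 *   _orchestrationDb.getFrameworkFrameNumber(internalFrame, fwkFrame);             // translate a result back
 */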
Thierry Strudel3d639192016-09-09 11:52:26 -07004581
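/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fill a pbcamera (HDR+ client) stream configuration from a
 *              channel's stream: image dimensions, format, per-plane
 *              stride/scanline, and padding (frame length minus the sum of
 *              the plane sizes).
 *
 * PARAMETERS :
 *   @config         : stream configuration to fill
 *   @pbStreamId     : pbcamera stream id
 *   @pbStreamFormat : pbcamera stream format
 *   @channel        : channel that owns the stream
 *   @streamIndex    : index of the stream within the channel
 *
 * RETURN     : Error status codes
 *
 *==========================================================================*/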
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004582status_t QCamera3HardwareInterface::fillPbStreamConfig(
4583 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4584 QCamera3Channel *channel, uint32_t streamIndex) {
4585 if (config == nullptr) {
4586 LOGE("%s: config is null", __FUNCTION__);
4587 return BAD_VALUE;
4588 }
4589
4590 if (channel == nullptr) {
4591 LOGE("%s: channel is null", __FUNCTION__);
4592 return BAD_VALUE;
4593 }
4594
4595 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4596 if (stream == nullptr) {
4597 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4598 return NAME_NOT_FOUND;
4599 }
4600
4601 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4602 if (streamInfo == nullptr) {
4603 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4604 return NAME_NOT_FOUND;
4605 }
4606
4607 config->id = pbStreamId;
4608 config->image.width = streamInfo->dim.width;
4609 config->image.height = streamInfo->dim.height;
4610 config->image.padding = 0;
4611 config->image.format = pbStreamFormat;
4612
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004613 uint32_t totalPlaneSize = 0;
4614
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004615 // Fill plane information.
4616 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4617 pbcamera::PlaneConfiguration plane;
4618 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4619 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4620 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004621
4622 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004623 }
4624
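 // Whatever remains in the frame beyond the plane data is reported as padding.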
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004625 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004626 return OK;
4627}
4628
Thierry Strudel3d639192016-09-09 11:52:26 -07004629/*===========================================================================
4630 * FUNCTION : processCaptureRequest
4631 *
4632 * DESCRIPTION: process a capture request from camera service
4633 *
4634 * PARAMETERS :
4635 * @request : request from framework to process
4636 *
4637 * RETURN :
4638 *
4639 *==========================================================================*/
4640int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004641 camera3_capture_request_t *request,
4642 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004643{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004644 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004645 int rc = NO_ERROR;
4646 int32_t request_id;
4647 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004648 bool isVidBufRequested = false;
4649 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004650 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004651
4652 pthread_mutex_lock(&mMutex);
4653
4654 // Validate current state
4655 switch (mState) {
4656 case CONFIGURED:
4657 case STARTED:
4658 /* valid state */
4659 break;
4660
4661 case ERROR:
4662 pthread_mutex_unlock(&mMutex);
4663 handleCameraDeviceError();
4664 return -ENODEV;
4665
4666 default:
4667 LOGE("Invalid state %d", mState);
4668 pthread_mutex_unlock(&mMutex);
4669 return -ENODEV;
4670 }
4671
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004672 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004673 if (rc != NO_ERROR) {
4674 LOGE("incoming request is not valid");
4675 pthread_mutex_unlock(&mMutex);
4676 return rc;
4677 }
4678
4679 meta = request->settings;
4680
4681 // For first capture request, send capture intent, and
4682 // stream on all streams
4683 if (mState == CONFIGURED) {
4684 // send an unconfigure to the backend so that the isp
4685 // resources are deallocated
4686 if (!mFirstConfiguration) {
4687 cam_stream_size_info_t stream_config_info;
4688 int32_t hal_version = CAM_HAL_V3;
4689 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4690 stream_config_info.buffer_info.min_buffers =
4691 MIN_INFLIGHT_REQUESTS;
4692 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004693 m_bIs4KVideo ? 0 :
4694 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004695 clear_metadata_buffer(mParameters);
4696 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4697 CAM_INTF_PARM_HAL_VERSION, hal_version);
4698 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4699 CAM_INTF_META_STREAM_INFO, stream_config_info);
4700 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4701 mParameters);
4702 if (rc < 0) {
4703 LOGE("set_parms for unconfigure failed");
4704 pthread_mutex_unlock(&mMutex);
4705 return rc;
4706 }
4707 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004708 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004709 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004710 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004711 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004712 property_get("persist.camera.is_type", is_type_value, "4");
4713 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4714 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4715 property_get("persist.camera.is_type_preview", is_type_value, "4");
4716 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4717 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004718
4719 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4720 int32_t hal_version = CAM_HAL_V3;
4721 uint8_t captureIntent =
4722 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4723 mCaptureIntent = captureIntent;
4724 clear_metadata_buffer(mParameters);
4725 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4726 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4727 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004728 if (mFirstConfiguration) {
4729 // configure instant AEC
4730 // Instant AEC is a session based parameter and it is needed only
4731 // once per complete session after open camera.
4732 // i.e. This is set only once for the first capture request, after open camera.
4733 setInstantAEC(meta);
4734 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004735 uint8_t fwkVideoStabMode=0;
4736 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4737 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4738 }
4739
4740 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4741 // turn it on for video/preview
4742 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4743 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004744 int32_t vsMode;
4745 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4746 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4747 rc = BAD_VALUE;
4748 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004749 LOGD("setEis %d", setEis);
4750 bool eis3Supported = false;
4751 size_t count = IS_TYPE_MAX;
4752 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4753 for (size_t i = 0; i < count; i++) {
4754 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4755 eis3Supported = true;
4756 break;
4757 }
4758 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004759
4760 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004761 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004762 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4763 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004764 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4765 is_type = isTypePreview;
4766 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4767 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4768 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004769 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004770 } else {
4771 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004772 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004773 } else {
4774 is_type = IS_TYPE_NONE;
4775 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004776 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004777 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004778 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4779 }
4780 }
4781
4782 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4783 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4784
Thierry Strudel54dc9782017-02-15 12:12:10 -08004785 //Disable tintless only if the property is set to 0
4786 memset(prop, 0, sizeof(prop));
4787 property_get("persist.camera.tintless.enable", prop, "1");
4788 int32_t tintless_value = atoi(prop);
4789
Thierry Strudel3d639192016-09-09 11:52:26 -07004790 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4791 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004792
Thierry Strudel3d639192016-09-09 11:52:26 -07004793 //Disable CDS for HFR mode or if DIS/EIS is on.
4794 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4795 //after every configure_stream
4796 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4797 (m_bIsVideo)) {
4798 int32_t cds = CAM_CDS_MODE_OFF;
4799 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4800 CAM_INTF_PARM_CDS_MODE, cds))
4801 LOGE("Failed to disable CDS for HFR mode");
4802
4803 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004804
4805 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4806 uint8_t* use_av_timer = NULL;
4807
4808 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004809 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004810 use_av_timer = &m_debug_avtimer;
4811 }
4812 else{
4813 use_av_timer =
4814 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004815 if (use_av_timer) {
4816 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4817 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004818 }
4819
4820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4821 rc = BAD_VALUE;
4822 }
4823 }
4824
Thierry Strudel3d639192016-09-09 11:52:26 -07004825 setMobicat();
4826
4827 /* Set fps and hfr mode while sending meta stream info so that sensor
4828 * can configure appropriate streaming mode */
4829 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004830 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4831 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4833 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004834 if (rc == NO_ERROR) {
4835 int32_t max_fps =
4836 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004837 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004838 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4839 }
4840 /* For HFR, more buffers are dequeued upfront to improve the performance */
4841 if (mBatchSize) {
4842 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4843 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4844 }
4845 }
4846 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 LOGE("setHalFpsRange failed");
4848 }
4849 }
4850 if (meta.exists(ANDROID_CONTROL_MODE)) {
4851 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4852 rc = extractSceneMode(meta, metaMode, mParameters);
4853 if (rc != NO_ERROR) {
4854 LOGE("extractSceneMode failed");
4855 }
4856 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004857 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004858
Thierry Strudel04e026f2016-10-10 11:27:36 -07004859 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4860 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4861 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4862 rc = setVideoHdrMode(mParameters, vhdr);
4863 if (rc != NO_ERROR) {
4864 LOGE("setVideoHDR is failed");
4865 }
4866 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004867
Thierry Strudel3d639192016-09-09 11:52:26 -07004868 //TODO: validate the arguments, HSV scenemode should have only the
4869 //advertised fps ranges
4870
4871 /*set the capture intent, hal version, tintless, stream info,
4872 *and DIS enable parameters to the backend*/
4873 LOGD("set_parms META_STREAM_INFO " );
4874 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004875 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4876 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004877 mStreamConfigInfo.type[i],
4878 mStreamConfigInfo.stream_sizes[i].width,
4879 mStreamConfigInfo.stream_sizes[i].height,
4880 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 mStreamConfigInfo.format[i],
4882 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004883 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004884
Thierry Strudel3d639192016-09-09 11:52:26 -07004885 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4886 mParameters);
4887 if (rc < 0) {
4888 LOGE("set_parms failed for hal version, stream info");
4889 }
4890
Chien-Yu Chenee335912017-02-09 17:53:20 -08004891 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4892 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004893 if (rc != NO_ERROR) {
4894 LOGE("Failed to get sensor output size");
4895 pthread_mutex_unlock(&mMutex);
4896 goto error_exit;
4897 }
4898
4899 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4900 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004901 mSensorModeInfo.active_array_size.width,
4902 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004903
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004904 {
4905 Mutex::Autolock l(gHdrPlusClientLock);
4906 if (EaselManagerClientOpened) {
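 // If the Easel coprocessor is open, start its MIPI at the sensor
 // mode's op_pixel_clk rate before streaming begins.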
4907 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4908 if (rc != OK) {
4909 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4910 mCameraId, mSensorModeInfo.op_pixel_clk);
4911 pthread_mutex_unlock(&mMutex);
4912 goto error_exit;
4913 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004914 }
4915 }
4916
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 /* Set batchmode before initializing channel. Since registerBuffer
4918 * internally initializes some of the channels, better set batchmode
4919 * even before first register buffer */
4920 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4921 it != mStreamInfo.end(); it++) {
4922 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4923 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4924 && mBatchSize) {
4925 rc = channel->setBatchSize(mBatchSize);
4926 //Disable per frame map unmap for HFR/batchmode case
4927 rc |= channel->setPerFrameMapUnmap(false);
4928 if (NO_ERROR != rc) {
4929 LOGE("Channel init failed %d", rc);
4930 pthread_mutex_unlock(&mMutex);
4931 goto error_exit;
4932 }
4933 }
4934 }
4935
4936 //First initialize all streams
4937 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4938 it != mStreamInfo.end(); it++) {
4939 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4940 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4941 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004942 setEis) {
4943 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4944 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4945 is_type = mStreamConfigInfo.is_type[i];
4946 break;
4947 }
4948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004949 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004950 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004951 rc = channel->initialize(IS_TYPE_NONE);
4952 }
4953 if (NO_ERROR != rc) {
4954 LOGE("Channel initialization failed %d", rc);
4955 pthread_mutex_unlock(&mMutex);
4956 goto error_exit;
4957 }
4958 }
4959
4960 if (mRawDumpChannel) {
4961 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4962 if (rc != NO_ERROR) {
4963 LOGE("Error: Raw Dump Channel init failed");
4964 pthread_mutex_unlock(&mMutex);
4965 goto error_exit;
4966 }
4967 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004968 if (mHdrPlusRawSrcChannel) {
4969 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4970 if (rc != NO_ERROR) {
4971 LOGE("Error: HDR+ RAW Source Channel init failed");
4972 pthread_mutex_unlock(&mMutex);
4973 goto error_exit;
4974 }
4975 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004976 if (mSupportChannel) {
4977 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4978 if (rc < 0) {
4979 LOGE("Support channel initialization failed");
4980 pthread_mutex_unlock(&mMutex);
4981 goto error_exit;
4982 }
4983 }
4984 if (mAnalysisChannel) {
4985 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4986 if (rc < 0) {
4987 LOGE("Analysis channel initialization failed");
4988 pthread_mutex_unlock(&mMutex);
4989 goto error_exit;
4990 }
4991 }
4992 if (mDummyBatchChannel) {
4993 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4994 if (rc < 0) {
4995 LOGE("mDummyBatchChannel setBatchSize failed");
4996 pthread_mutex_unlock(&mMutex);
4997 goto error_exit;
4998 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004999 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 if (rc < 0) {
5001 LOGE("mDummyBatchChannel initialization failed");
5002 pthread_mutex_unlock(&mMutex);
5003 goto error_exit;
5004 }
5005 }
5006
5007 // Set bundle info
5008 rc = setBundleInfo();
5009 if (rc < 0) {
5010 LOGE("setBundleInfo failed %d", rc);
5011 pthread_mutex_unlock(&mMutex);
5012 goto error_exit;
5013 }
5014
5015 //update settings from app here
5016 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5017 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5018 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5019 }
5020 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5021 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5022 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5023 }
5024 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5025 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5026 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5027
5028 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5029 (mLinkedCameraId != mCameraId) ) {
5030 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5031 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 goto error_exit;
5034 }
5035 }
5036
5037 // add bundle related cameras
5038 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5039 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005040 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5041 &m_pDualCamCmdPtr->bundle_info;
5042 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
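 // Fill the bundle info: sync on/off, primary (bayer) vs auxiliary (mono) role,
 // 3A sync mode, and the linked camera's session id, then send it to the
 // backend via set_dual_cam_cmd.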
Thierry Strudel3d639192016-09-09 11:52:26 -07005043 if (mIsDeviceLinked)
5044 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5045 else
5046 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5047
5048 pthread_mutex_lock(&gCamLock);
5049
5050 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5051 LOGE("Dualcam: Invalid Session Id ");
5052 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005053 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005054 goto error_exit;
5055 }
5056
5057 if (mIsMainCamera == 1) {
5058 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5059 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005060 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005061 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005062 // related session id should be session id of linked session
5063 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5064 } else {
5065 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5066 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005067 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005068 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005069 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5070 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005071 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 pthread_mutex_unlock(&gCamLock);
5073
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005074 rc = mCameraHandle->ops->set_dual_cam_cmd(
5075 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005076 if (rc < 0) {
5077 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005078 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005079 goto error_exit;
5080 }
5081 }
5082
5083 //Then start them.
5084 LOGH("Start META Channel");
5085 rc = mMetadataChannel->start();
5086 if (rc < 0) {
5087 LOGE("META channel start failed");
5088 pthread_mutex_unlock(&mMutex);
5089 goto error_exit;
5090 }
5091
5092 if (mAnalysisChannel) {
5093 rc = mAnalysisChannel->start();
5094 if (rc < 0) {
5095 LOGE("Analysis channel start failed");
5096 mMetadataChannel->stop();
5097 pthread_mutex_unlock(&mMutex);
5098 goto error_exit;
5099 }
5100 }
5101
5102 if (mSupportChannel) {
5103 rc = mSupportChannel->start();
5104 if (rc < 0) {
5105 LOGE("Support channel start failed");
5106 mMetadataChannel->stop();
5107 /* Although support and analysis are mutually exclusive today
5108 adding it in any case for future proofing */
5109 if (mAnalysisChannel) {
5110 mAnalysisChannel->stop();
5111 }
5112 pthread_mutex_unlock(&mMutex);
5113 goto error_exit;
5114 }
5115 }
5116 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5117 it != mStreamInfo.end(); it++) {
5118 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5119 LOGH("Start Processing Channel mask=%d",
5120 channel->getStreamTypeMask());
5121 rc = channel->start();
5122 if (rc < 0) {
5123 LOGE("channel start failed");
5124 pthread_mutex_unlock(&mMutex);
5125 goto error_exit;
5126 }
5127 }
5128
5129 if (mRawDumpChannel) {
5130 LOGD("Starting raw dump stream");
5131 rc = mRawDumpChannel->start();
5132 if (rc != NO_ERROR) {
5133 LOGE("Error Starting Raw Dump Channel");
5134 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5135 it != mStreamInfo.end(); it++) {
5136 QCamera3Channel *channel =
5137 (QCamera3Channel *)(*it)->stream->priv;
5138 LOGH("Stopping Processing Channel mask=%d",
5139 channel->getStreamTypeMask());
5140 channel->stop();
5141 }
5142 if (mSupportChannel)
5143 mSupportChannel->stop();
5144 if (mAnalysisChannel) {
5145 mAnalysisChannel->stop();
5146 }
5147 mMetadataChannel->stop();
5148 pthread_mutex_unlock(&mMutex);
5149 goto error_exit;
5150 }
5151 }
5152
5153 if (mChannelHandle) {
5154
5155 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5156 mChannelHandle);
5157 if (rc != NO_ERROR) {
5158 LOGE("start_channel failed %d", rc);
5159 pthread_mutex_unlock(&mMutex);
5160 goto error_exit;
5161 }
5162 }
5163
5164 goto no_error;
5165error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005166 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005167 return rc;
5168no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005169 mWokenUpByDaemon = false;
5170 mPendingLiveRequest = 0;
5171 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005172 }
5173
Chien-Yu Chenee335912017-02-09 17:53:20 -08005174 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005175 {
5176 Mutex::Autolock l(gHdrPlusClientLock);
5177 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5178 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5179 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5180 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5181 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5182 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005183 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005184 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005185 pthread_mutex_unlock(&mMutex);
5186 return rc;
5187 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005188
5189 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005190 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005191 }
5192
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005194 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005195
5196 if (mFlushPerf) {
5197 //we cannot accept any requests during flush
5198 LOGE("process_capture_request cannot proceed during flush");
5199 pthread_mutex_unlock(&mMutex);
5200 return NO_ERROR; //should return an error
5201 }
5202
5203 if (meta.exists(ANDROID_REQUEST_ID)) {
5204 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5205 mCurrentRequestId = request_id;
5206 LOGD("Received request with id: %d", request_id);
5207 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5208 LOGE("Unable to find request id field, \
5209 & no previous id available");
5210 pthread_mutex_unlock(&mMutex);
5211 return NAME_NOT_FOUND;
5212 } else {
5213 LOGD("Re-using old request id");
5214 request_id = mCurrentRequestId;
5215 }
5216
5217 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5218 request->num_output_buffers,
5219 request->input_buffer,
5220 frameNumber);
5221 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005222 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005224 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 uint32_t snapshotStreamId = 0;
5226 for (size_t i = 0; i < request->num_output_buffers; i++) {
5227 const camera3_stream_buffer_t& output = request->output_buffers[i];
5228 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5229
Emilian Peev7650c122017-01-19 08:24:33 -08005230 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5231 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005232 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005233 blob_request = 1;
5234 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5235 }
5236
5237 if (output.acquire_fence != -1) {
5238 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5239 close(output.acquire_fence);
5240 if (rc != OK) {
5241 LOGE("sync wait failed %d", rc);
5242 pthread_mutex_unlock(&mMutex);
5243 return rc;
5244 }
5245 }
5246
Emilian Peev0f3c3162017-03-15 12:57:46 +00005247 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5248 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005249 depthRequestPresent = true;
5250 continue;
5251 }
5252
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005253 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005254 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005255
5256 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5257 isVidBufRequested = true;
5258 }
5259 }
5260
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005261 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5262 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5263 itr++) {
5264 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5265 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5266 channel->getStreamID(channel->getStreamTypeMask());
5267
5268 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5269 isVidBufRequested = true;
5270 }
5271 }
5272
Thierry Strudel3d639192016-09-09 11:52:26 -07005273 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005274 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005275 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005276 }
5277 if (blob_request && mRawDumpChannel) {
5278 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005279 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005280 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005281 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005282 }
5283
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005284 {
5285 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5286 // Request a RAW buffer if
5287 // 1. mHdrPlusRawSrcChannel is valid.
5288 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5289 // 3. There is no pending HDR+ request.
5290 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5291 mHdrPlusPendingRequests.size() == 0) {
5292 streamsArray.stream_request[streamsArray.num_streams].streamID =
5293 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5294 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5295 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005296 }
5297
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005298 //extract capture intent
5299 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5300 mCaptureIntent =
5301 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5302 }
5303
5304 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5305 mCacMode =
5306 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5307 }
5308
5309 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005310 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005311
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005312 {
5313 Mutex::Autolock l(gHdrPlusClientLock);
5314 // If this request has a still capture intent, try to submit an HDR+ request.
5315 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5316 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5317 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5318 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005319 }
5320
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005321 if (hdrPlusRequest) {
5322 // For a HDR+ request, just set the frame parameters.
5323 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5324 if (rc < 0) {
5325 LOGE("fail to set frame parameters");
5326 pthread_mutex_unlock(&mMutex);
5327 return rc;
5328 }
5329 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005330 /* Parse the settings:
5331 * - For every request in NORMAL MODE
5332 * - For every request in HFR mode during preview only case
5333 * - For first request of every batch in HFR mode during video
5334 * recording. In batchmode the same settings except frame number is
5335 * repeated in each request of the batch.
5336 */
5337 if (!mBatchSize ||
5338 (mBatchSize && !isVidBufRequested) ||
5339 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005340 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005341 if (rc < 0) {
5342 LOGE("fail to set frame parameters");
5343 pthread_mutex_unlock(&mMutex);
5344 return rc;
5345 }
5346 }
        /* For batch mode HFR, setFrameParameters is not called for every
         * request; only the frame number of the latest request is parsed.
         * Keep track of the first and last frame numbers in a batch so that
         * metadata for the frame numbers of the batch can be duplicated in
         * handleBatchMetadata */
5352 if (mBatchSize) {
5353 if (!mToBeQueuedVidBufs) {
5354 //start of the batch
5355 mFirstFrameNumberInBatch = request->frame_number;
5356 }
5357 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5358 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5359 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005360 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005361 return BAD_VALUE;
5362 }
5363 }
5364 if (mNeedSensorRestart) {
5365 /* Unlock the mutex as restartSensor waits on the channels to be
5366 * stopped, which in turn calls stream callback functions -
5367 * handleBufferWithLock and handleMetadataWithLock */
5368 pthread_mutex_unlock(&mMutex);
5369 rc = dynamicUpdateMetaStreamInfo();
5370 if (rc != NO_ERROR) {
5371 LOGE("Restarting the sensor failed");
5372 return BAD_VALUE;
5373 }
5374 mNeedSensorRestart = false;
5375 pthread_mutex_lock(&mMutex);
5376 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005377 if(mResetInstantAEC) {
5378 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5379 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5380 mResetInstantAEC = false;
5381 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005382 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005383 if (request->input_buffer->acquire_fence != -1) {
5384 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5385 close(request->input_buffer->acquire_fence);
5386 if (rc != OK) {
5387 LOGE("input buffer sync wait failed %d", rc);
5388 pthread_mutex_unlock(&mMutex);
5389 return rc;
5390 }
5391 }
5392 }
5393
5394 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5395 mLastCustIntentFrmNum = frameNumber;
5396 }
5397 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005398 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005399 pendingRequestIterator latestRequest;
5400 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005401 pendingRequest.num_buffers = depthRequestPresent ?
5402 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005403 pendingRequest.request_id = request_id;
5404 pendingRequest.blob_request = blob_request;
5405 pendingRequest.timestamp = 0;
5406 pendingRequest.bUrgentReceived = 0;
5407 if (request->input_buffer) {
5408 pendingRequest.input_buffer =
5409 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5410 *(pendingRequest.input_buffer) = *(request->input_buffer);
5411 pInputBuffer = pendingRequest.input_buffer;
5412 } else {
5413 pendingRequest.input_buffer = NULL;
5414 pInputBuffer = NULL;
5415 }
5416
5417 pendingRequest.pipeline_depth = 0;
5418 pendingRequest.partial_result_cnt = 0;
5419 extractJpegMetadata(mCurJpegMeta, request);
5420 pendingRequest.jpegMetadata = mCurJpegMeta;
5421 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5422 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005424 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5425 mHybridAeEnable =
5426 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5427 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005428
5429 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5430 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005431 /* DevCamDebug metadata processCaptureRequest */
5432 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5433 mDevCamDebugMetaEnable =
5434 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5435 }
5436 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5437 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005438
5439 //extract CAC info
5440 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5441 mCacMode =
5442 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5443 }
5444 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005445 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005446
5447 PendingBuffersInRequest bufsForCurRequest;
5448 bufsForCurRequest.frame_number = frameNumber;
5449 // Mark current timestamp for the new request
5450 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005451 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005452
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005453 if (hdrPlusRequest) {
5454 // Save settings for this request.
5455 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5456 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5457
5458 // Add to pending HDR+ request queue.
5459 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5460 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5461
5462 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5463 }
5464
Thierry Strudel3d639192016-09-09 11:52:26 -07005465 for (size_t i = 0; i < request->num_output_buffers; i++) {
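        // Depth (HAL_DATASPACE_DEPTH blob) output buffers are intentionally skipped
        // here; they are not tracked in the regular pending buffer list and are
        // handled separately through the depth channel below.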
Emilian Peev0f3c3162017-03-15 12:57:46 +00005466 if ((request->output_buffers[i].stream->data_space ==
5467 HAL_DATASPACE_DEPTH) &&
5468 (HAL_PIXEL_FORMAT_BLOB ==
5469 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005470 continue;
5471 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005472 RequestedBufferInfo requestedBuf;
5473 memset(&requestedBuf, 0, sizeof(requestedBuf));
5474 requestedBuf.stream = request->output_buffers[i].stream;
5475 requestedBuf.buffer = NULL;
5476 pendingRequest.buffers.push_back(requestedBuf);
5477
5478 // Add to buffer handle the pending buffers list
5479 PendingBufferInfo bufferInfo;
5480 bufferInfo.buffer = request->output_buffers[i].buffer;
5481 bufferInfo.stream = request->output_buffers[i].stream;
5482 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5483 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5484 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5485 frameNumber, bufferInfo.buffer,
5486 channel->getStreamTypeMask(), bufferInfo.stream->format);
5487 }
5488 // Add this request packet into mPendingBuffersMap
5489 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5490 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5491 mPendingBuffersMap.get_num_overall_buffers());
5492
5493 latestRequest = mPendingRequestsList.insert(
5494 mPendingRequestsList.end(), pendingRequest);
5495 if(mFlush) {
5496 LOGI("mFlush is true");
5497 pthread_mutex_unlock(&mMutex);
5498 return NO_ERROR;
5499 }
5500
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005501 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5502 // channel.
5503 if (!hdrPlusRequest) {
5504 int indexUsed;
5505 // Notify metadata channel we receive a request
5506 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005507
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005508 if(request->input_buffer != NULL){
5509 LOGD("Input request, frame_number %d", frameNumber);
5510 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5511 if (NO_ERROR != rc) {
5512 LOGE("fail to set reproc parameters");
5513 pthread_mutex_unlock(&mMutex);
5514 return rc;
5515 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005516 }
5517
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005518 // Call request on other streams
5519 uint32_t streams_need_metadata = 0;
5520 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5521 for (size_t i = 0; i < request->num_output_buffers; i++) {
5522 const camera3_stream_buffer_t& output = request->output_buffers[i];
5523 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5524
5525 if (channel == NULL) {
5526 LOGW("invalid channel pointer for stream");
5527 continue;
5528 }
5529
5530 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5531 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5532 output.buffer, request->input_buffer, frameNumber);
5533 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005534 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005535 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5536 if (rc < 0) {
5537 LOGE("Fail to request on picture channel");
5538 pthread_mutex_unlock(&mMutex);
5539 return rc;
5540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005541 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005542 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5543 assert(NULL != mDepthChannel);
5544 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005545
Emilian Peev7650c122017-01-19 08:24:33 -08005546 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5547 if (rc < 0) {
5548 LOGE("Fail to map on depth buffer");
5549 pthread_mutex_unlock(&mMutex);
5550 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005551 }
Emilian Peev7650c122017-01-19 08:24:33 -08005552 } else {
5553 LOGD("snapshot request with buffer %p, frame_number %d",
5554 output.buffer, frameNumber);
5555 if (!request->settings) {
5556 rc = channel->request(output.buffer, frameNumber,
5557 NULL, mPrevParameters, indexUsed);
5558 } else {
5559 rc = channel->request(output.buffer, frameNumber,
5560 NULL, mParameters, indexUsed);
5561 }
5562 if (rc < 0) {
5563 LOGE("Fail to request on picture channel");
5564 pthread_mutex_unlock(&mMutex);
5565 return rc;
5566 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005567
Emilian Peev7650c122017-01-19 08:24:33 -08005568 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5569 uint32_t j = 0;
5570 for (j = 0; j < streamsArray.num_streams; j++) {
5571 if (streamsArray.stream_request[j].streamID == streamId) {
5572 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5573 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5574 else
5575 streamsArray.stream_request[j].buf_index = indexUsed;
5576 break;
5577 }
5578 }
5579 if (j == streamsArray.num_streams) {
5580 LOGE("Did not find matching stream to update index");
5581 assert(0);
5582 }
5583
5584 pendingBufferIter->need_metadata = true;
5585 streams_need_metadata++;
5586 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005587 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005588 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5589 bool needMetadata = false;
5590 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5591 rc = yuvChannel->request(output.buffer, frameNumber,
5592 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5593 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005594 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005595 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005596 pthread_mutex_unlock(&mMutex);
5597 return rc;
5598 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005599
5600 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5601 uint32_t j = 0;
5602 for (j = 0; j < streamsArray.num_streams; j++) {
5603 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005604 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5605 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5606 else
5607 streamsArray.stream_request[j].buf_index = indexUsed;
5608 break;
5609 }
5610 }
5611 if (j == streamsArray.num_streams) {
5612 LOGE("Did not find matching stream to update index");
5613 assert(0);
5614 }
5615
5616 pendingBufferIter->need_metadata = needMetadata;
5617 if (needMetadata)
5618 streams_need_metadata += 1;
5619 LOGD("calling YUV channel request, need_metadata is %d",
5620 needMetadata);
5621 } else {
5622 LOGD("request with buffer %p, frame_number %d",
5623 output.buffer, frameNumber);
5624
5625 rc = channel->request(output.buffer, frameNumber, indexUsed);
5626
5627 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5628 uint32_t j = 0;
5629 for (j = 0; j < streamsArray.num_streams; j++) {
5630 if (streamsArray.stream_request[j].streamID == streamId) {
5631 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5632 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5633 else
5634 streamsArray.stream_request[j].buf_index = indexUsed;
5635 break;
5636 }
5637 }
5638 if (j == streamsArray.num_streams) {
5639 LOGE("Did not find matching stream to update index");
5640 assert(0);
5641 }
5642
5643 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5644 && mBatchSize) {
5645 mToBeQueuedVidBufs++;
5646 if (mToBeQueuedVidBufs == mBatchSize) {
5647 channel->queueBatchBuf();
5648 }
5649 }
5650 if (rc < 0) {
5651 LOGE("request failed");
5652 pthread_mutex_unlock(&mMutex);
5653 return rc;
5654 }
5655 }
5656 pendingBufferIter++;
5657 }
5658
5659 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5660 itr++) {
5661 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5662
5663 if (channel == NULL) {
5664 LOGE("invalid channel pointer for stream");
5665 assert(0);
5666 return BAD_VALUE;
5667 }
5668
5669 InternalRequest requestedStream;
5670 requestedStream = (*itr);
5671
5672
5673 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5674 LOGD("snapshot request internally input buffer %p, frame_number %d",
5675 request->input_buffer, frameNumber);
5676 if(request->input_buffer != NULL){
5677 rc = channel->request(NULL, frameNumber,
5678 pInputBuffer, &mReprocMeta, indexUsed, true,
5679 requestedStream.meteringOnly);
5680 if (rc < 0) {
5681 LOGE("Fail to request on picture channel");
5682 pthread_mutex_unlock(&mMutex);
5683 return rc;
5684 }
5685 } else {
5686 LOGD("snapshot request with frame_number %d", frameNumber);
5687 if (!request->settings) {
5688 rc = channel->request(NULL, frameNumber,
5689 NULL, mPrevParameters, indexUsed, true,
5690 requestedStream.meteringOnly);
5691 } else {
5692 rc = channel->request(NULL, frameNumber,
5693 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5694 }
5695 if (rc < 0) {
5696 LOGE("Fail to request on picture channel");
5697 pthread_mutex_unlock(&mMutex);
5698 return rc;
5699 }
5700
5701 if ((*itr).meteringOnly != 1) {
5702 requestedStream.need_metadata = 1;
5703 streams_need_metadata++;
5704 }
5705 }
5706
5707 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5708 uint32_t j = 0;
5709 for (j = 0; j < streamsArray.num_streams; j++) {
5710 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005711 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5712 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5713 else
5714 streamsArray.stream_request[j].buf_index = indexUsed;
5715 break;
5716 }
5717 }
5718 if (j == streamsArray.num_streams) {
5719 LOGE("Did not find matching stream to update index");
5720 assert(0);
5721 }
5722
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005723 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005724 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005725 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005726 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005727 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005728 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005729 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005730
        // If two streams have need_metadata set to true, fail the request unless
        // we copy or reference count the metadata buffer.
        if (streams_need_metadata > 1) {
            LOGE("not supporting request in which two streams require"
                    " 2 HAL metadata for reprocessing");
5736 pthread_mutex_unlock(&mMutex);
5737 return -EINVAL;
5738 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005739
Emilian Peev7650c122017-01-19 08:24:33 -08005740 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5742 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5743 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5744 pthread_mutex_unlock(&mMutex);
5745 return BAD_VALUE;
5746 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005747 if (request->input_buffer == NULL) {
5748 /* Set the parameters to backend:
5749 * - For every request in NORMAL MODE
5750 * - For every request in HFR mode during preview only case
5751 * - Once every batch in HFR mode during video recording
5752 */
5753 if (!mBatchSize ||
5754 (mBatchSize && !isVidBufRequested) ||
5755 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5756 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5757 mBatchSize, isVidBufRequested,
5758 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005759
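            // For a completed HFR batch, merge the streams requested across the
            // whole batch into mBatchedStreamsArray (skipping duplicates) so that
            // the single set_parms call below covers every stream in the batch.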
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005760 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5761 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5762 uint32_t m = 0;
5763 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5764 if (streamsArray.stream_request[k].streamID ==
5765 mBatchedStreamsArray.stream_request[m].streamID)
5766 break;
5767 }
5768 if (m == mBatchedStreamsArray.num_streams) {
5769 mBatchedStreamsArray.stream_request\
5770 [mBatchedStreamsArray.num_streams].streamID =
5771 streamsArray.stream_request[k].streamID;
5772 mBatchedStreamsArray.stream_request\
5773 [mBatchedStreamsArray.num_streams].buf_index =
5774 streamsArray.stream_request[k].buf_index;
5775 mBatchedStreamsArray.num_streams =
5776 mBatchedStreamsArray.num_streams + 1;
5777 }
5778 }
5779 streamsArray = mBatchedStreamsArray;
5780 }
5781 /* Update stream id of all the requested buffers */
5782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5783 streamsArray)) {
5784 LOGE("Failed to set stream type mask in the parameters");
5785 return BAD_VALUE;
5786 }
5787
5788 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5789 mParameters);
5790 if (rc < 0) {
5791 LOGE("set_parms failed");
5792 }
            /* reset to zero because the batch is queued */
5794 mToBeQueuedVidBufs = 0;
5795 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5796 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5797 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
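            // Batch not yet complete: only accumulate this request's streams into
            // mBatchedStreamsArray; set_parms is deferred until the batch is full.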
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005798 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5799 uint32_t m = 0;
5800 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5801 if (streamsArray.stream_request[k].streamID ==
5802 mBatchedStreamsArray.stream_request[m].streamID)
5803 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005804 }
5805 if (m == mBatchedStreamsArray.num_streams) {
5806 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5807 streamID = streamsArray.stream_request[k].streamID;
5808 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5809 buf_index = streamsArray.stream_request[k].buf_index;
5810 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5811 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005812 }
5813 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005814 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005815 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005816 }
5817
5818 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5819
5820 mState = STARTED;
5821 // Added a timed condition wait
5822 struct timespec ts;
5823 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005824 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005825 if (rc < 0) {
5826 isValidTimeout = 0;
5827 LOGE("Error reading the real time clock!!");
5828 }
5829 else {
        // Default to a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005831 int64_t timeout = 5;
5832 {
5833 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5834 // If there is a pending HDR+ request, the following requests may be blocked until the
5835 // HDR+ request is done. So allow a longer timeout.
5836 if (mHdrPlusPendingRequests.size() > 0) {
5837 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5838 }
5839 }
5840 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005841 }
5842 //Block on conditional variable
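    // This throttles process_capture_request: the call blocks until the number of
    // in-flight requests falls below the allowed threshold (mMinInFlightRequests,
    // or mMaxInFlightRequests when woken by the daemon), the wait times out, or an
    // error/deinit state is reached.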
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005843 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005844 (mState != ERROR) && (mState != DEINIT)) {
5845 if (!isValidTimeout) {
5846 LOGD("Blocking on conditional wait");
5847 pthread_cond_wait(&mRequestCond, &mMutex);
5848 }
5849 else {
5850 LOGD("Blocking on timed conditional wait");
5851 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5852 if (rc == ETIMEDOUT) {
5853 rc = -ENODEV;
5854 LOGE("Unblocked on timeout!!!!");
5855 break;
5856 }
5857 }
5858 LOGD("Unblocked");
5859 if (mWokenUpByDaemon) {
5860 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005861 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005862 break;
5863 }
5864 }
5865 pthread_mutex_unlock(&mMutex);
5866
5867 return rc;
5868}
5869
5870/*===========================================================================
5871 * FUNCTION : dump
5872 *
 * DESCRIPTION: Dump HAL3 state (pending requests, pending buffers and the
 *              pending frame drop list) to the given file descriptor.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump to
 *
 * RETURN     : None
5879 *==========================================================================*/
5880void QCamera3HardwareInterface::dump(int fd)
5881{
5882 pthread_mutex_lock(&mMutex);
5883 dprintf(fd, "\n Camera HAL3 information Begin \n");
5884
5885 dprintf(fd, "\nNumber of pending requests: %zu \n",
5886 mPendingRequestsList.size());
5887 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5888 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5889 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5890 for(pendingRequestIterator i = mPendingRequestsList.begin();
5891 i != mPendingRequestsList.end(); i++) {
5892 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5893 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5894 i->input_buffer);
5895 }
5896 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5897 mPendingBuffersMap.get_num_overall_buffers());
5898 dprintf(fd, "-------+------------------\n");
5899 dprintf(fd, " Frame | Stream type mask \n");
5900 dprintf(fd, "-------+------------------\n");
5901 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5902 for(auto &j : req.mPendingBufferList) {
5903 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5904 dprintf(fd, " %5d | %11d \n",
5905 req.frame_number, channel->getStreamTypeMask());
5906 }
5907 }
5908 dprintf(fd, "-------+------------------\n");
5909
5910 dprintf(fd, "\nPending frame drop list: %zu\n",
5911 mPendingFrameDropList.size());
5912 dprintf(fd, "-------+-----------\n");
5913 dprintf(fd, " Frame | Stream ID \n");
5914 dprintf(fd, "-------+-----------\n");
5915 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5916 i != mPendingFrameDropList.end(); i++) {
5917 dprintf(fd, " %5d | %9d \n",
5918 i->frame_number, i->stream_ID);
5919 }
5920 dprintf(fd, "-------+-----------\n");
5921
5922 dprintf(fd, "\n Camera HAL3 information End \n");
5923
5924 /* use dumpsys media.camera as trigger to send update debug level event */
5925 mUpdateDebugLevel = true;
5926 pthread_mutex_unlock(&mMutex);
5927 return;
5928}
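// Note: `adb shell dumpsys media.camera` is the usual way to reach this dump at
// runtime; as noted above, it also triggers the debug level update.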
5929
5930/*===========================================================================
5931 * FUNCTION : flush
5932 *
5933 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5934 * conditionally restarts channels
5935 *
5936 * PARAMETERS :
5937 * @ restartChannels: re-start all channels
5938 *
5939 *
5940 * RETURN :
5941 * 0 on success
5942 * Error code on failure
5943 *==========================================================================*/
5944int QCamera3HardwareInterface::flush(bool restartChannels)
5945{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005946 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005947 int32_t rc = NO_ERROR;
5948
5949 LOGD("Unblocking Process Capture Request");
5950 pthread_mutex_lock(&mMutex);
5951 mFlush = true;
5952 pthread_mutex_unlock(&mMutex);
5953
5954 rc = stopAllChannels();
5955 // unlink of dualcam
5956 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005957 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5958 &m_pDualCamCmdPtr->bundle_info;
5959 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005960 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5961 pthread_mutex_lock(&gCamLock);
5962
5963 if (mIsMainCamera == 1) {
5964 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5965 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005966 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005967 // related session id should be session id of linked session
5968 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5969 } else {
5970 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5971 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005972 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005973 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5974 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005975 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005976 pthread_mutex_unlock(&gCamLock);
5977
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005978 rc = mCameraHandle->ops->set_dual_cam_cmd(
5979 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005980 if (rc < 0) {
5981 LOGE("Dualcam: Unlink failed, but still proceed to close");
5982 }
5983 }
5984
5985 if (rc < 0) {
5986 LOGE("stopAllChannels failed");
5987 return rc;
5988 }
5989 if (mChannelHandle) {
5990 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5991 mChannelHandle);
5992 }
5993
5994 // Reset bundle info
5995 rc = setBundleInfo();
5996 if (rc < 0) {
5997 LOGE("setBundleInfo failed %d", rc);
5998 return rc;
5999 }
6000
6001 // Mutex Lock
6002 pthread_mutex_lock(&mMutex);
6003
6004 // Unblock process_capture_request
6005 mPendingLiveRequest = 0;
6006 pthread_cond_signal(&mRequestCond);
6007
6008 rc = notifyErrorForPendingRequests();
6009 if (rc < 0) {
6010 LOGE("notifyErrorForPendingRequests failed");
6011 pthread_mutex_unlock(&mMutex);
6012 return rc;
6013 }
6014
6015 mFlush = false;
6016
6017 // Start the Streams/Channels
6018 if (restartChannels) {
6019 rc = startAllChannels();
6020 if (rc < 0) {
6021 LOGE("startAllChannels failed");
6022 pthread_mutex_unlock(&mMutex);
6023 return rc;
6024 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006025 if (mChannelHandle) {
6026 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6027 mChannelHandle);
6028 if (rc < 0) {
6029 LOGE("start_channel failed");
6030 pthread_mutex_unlock(&mMutex);
6031 return rc;
6032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006033 }
6034 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006035 pthread_mutex_unlock(&mMutex);
6036
6037 return 0;
6038}
6039
6040/*===========================================================================
6041 * FUNCTION : flushPerf
6042 *
 * DESCRIPTION: Performance-optimized version of flush that does not use
 *              stream off; instead it flushes the backend and waits for the
 *              pending buffers to be returned.
6045 *
6046 * PARAMETERS :
6047 *
6048 *
6049 * RETURN : 0 : success
6050 * -EINVAL: input is malformed (device is not valid)
6051 * -ENODEV: if the device has encountered a serious error
6052 *==========================================================================*/
6053int QCamera3HardwareInterface::flushPerf()
6054{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006055 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006056 int32_t rc = 0;
6057 struct timespec timeout;
6058 bool timed_wait = false;
6059
6060 pthread_mutex_lock(&mMutex);
6061 mFlushPerf = true;
6062 mPendingBuffersMap.numPendingBufsAtFlush =
6063 mPendingBuffersMap.get_num_overall_buffers();
6064 LOGD("Calling flush. Wait for %d buffers to return",
6065 mPendingBuffersMap.numPendingBufsAtFlush);
6066
6067 /* send the flush event to the backend */
6068 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6069 if (rc < 0) {
6070 LOGE("Error in flush: IOCTL failure");
6071 mFlushPerf = false;
6072 pthread_mutex_unlock(&mMutex);
6073 return -ENODEV;
6074 }
6075
6076 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6077 LOGD("No pending buffers in HAL, return flush");
6078 mFlushPerf = false;
6079 pthread_mutex_unlock(&mMutex);
6080 return rc;
6081 }
6082
6083 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006084 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006085 if (rc < 0) {
6086 LOGE("Error reading the real time clock, cannot use timed wait");
6087 } else {
6088 timeout.tv_sec += FLUSH_TIMEOUT;
6089 timed_wait = true;
6090 }
6091
6092 //Block on conditional variable
6093 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6094 LOGD("Waiting on mBuffersCond");
6095 if (!timed_wait) {
6096 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6097 if (rc != 0) {
6098 LOGE("pthread_cond_wait failed due to rc = %s",
6099 strerror(rc));
6100 break;
6101 }
6102 } else {
6103 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6104 if (rc != 0) {
6105 LOGE("pthread_cond_timedwait failed due to rc = %s",
6106 strerror(rc));
6107 break;
6108 }
6109 }
6110 }
6111 if (rc != 0) {
6112 mFlushPerf = false;
6113 pthread_mutex_unlock(&mMutex);
6114 return -ENODEV;
6115 }
6116
6117 LOGD("Received buffers, now safe to return them");
6118
6119 //make sure the channels handle flush
6120 //currently only required for the picture channel to release snapshot resources
6121 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6122 it != mStreamInfo.end(); it++) {
6123 QCamera3Channel *channel = (*it)->channel;
6124 if (channel) {
6125 rc = channel->flush();
6126 if (rc) {
6127 LOGE("Flushing the channels failed with error %d", rc);
                // Even though the channel flush failed, we need to continue and
                // return the buffers we have to the framework; however, the
                // return value will be an error.
6131 rc = -ENODEV;
6132 }
6133 }
6134 }
6135
6136 /* notify the frameworks and send errored results */
6137 rc = notifyErrorForPendingRequests();
6138 if (rc < 0) {
6139 LOGE("notifyErrorForPendingRequests failed");
6140 pthread_mutex_unlock(&mMutex);
6141 return rc;
6142 }
6143
6144 //unblock process_capture_request
6145 mPendingLiveRequest = 0;
6146 unblockRequestIfNecessary();
6147
6148 mFlushPerf = false;
6149 pthread_mutex_unlock(&mMutex);
6150 LOGD ("Flush Operation complete. rc = %d", rc);
6151 return rc;
6152}
6153
6154/*===========================================================================
6155 * FUNCTION : handleCameraDeviceError
6156 *
 * DESCRIPTION: Calls an internal flush, notifies the framework of the device
 *              error and updates the state variable.
6159 *
6160 * PARAMETERS : None
6161 *
6162 * RETURN : NO_ERROR on Success
6163 * Error code on failure
6164 *==========================================================================*/
6165int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6166{
6167 int32_t rc = NO_ERROR;
6168
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006169 {
6170 Mutex::Autolock lock(mFlushLock);
6171 pthread_mutex_lock(&mMutex);
6172 if (mState != ERROR) {
6173 //if mState != ERROR, nothing to be done
6174 pthread_mutex_unlock(&mMutex);
6175 return NO_ERROR;
6176 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006177 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006178
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006179 rc = flush(false /* restart channels */);
6180 if (NO_ERROR != rc) {
6181 LOGE("internal flush to handle mState = ERROR failed");
6182 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006183
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006184 pthread_mutex_lock(&mMutex);
6185 mState = DEINIT;
6186 pthread_mutex_unlock(&mMutex);
6187 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006188
6189 camera3_notify_msg_t notify_msg;
6190 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6191 notify_msg.type = CAMERA3_MSG_ERROR;
6192 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6193 notify_msg.message.error.error_stream = NULL;
6194 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006195 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006196
6197 return rc;
6198}
6199
6200/*===========================================================================
6201 * FUNCTION : captureResultCb
6202 *
6203 * DESCRIPTION: Callback handler for all capture result
6204 * (streams, as well as metadata)
6205 *
6206 * PARAMETERS :
 *   @metadata_buf  : metadata information
 *   @buffer        : actual gralloc buffer to be returned to frameworks.
 *                    NULL if metadata.
 *   @frame_number  : frame number of the request the callback belongs to
 *   @isInputBuffer : true if this callback is for the input buffer of a
 *                    reprocess request
 *
6211 * RETURN : NONE
6212 *==========================================================================*/
6213void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6214 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6215{
6216 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006217 pthread_mutex_lock(&mMutex);
6218 uint8_t batchSize = mBatchSize;
6219 pthread_mutex_unlock(&mMutex);
6220 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006221 handleBatchMetadata(metadata_buf,
6222 true /* free_and_bufdone_meta_buf */);
6223 } else { /* mBatchSize = 0 */
6224 hdrPlusPerfLock(metadata_buf);
6225 pthread_mutex_lock(&mMutex);
6226 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006227 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006228 true /* last urgent frame of batch metadata */,
6229 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006230 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006231 pthread_mutex_unlock(&mMutex);
6232 }
6233 } else if (isInputBuffer) {
6234 pthread_mutex_lock(&mMutex);
6235 handleInputBufferWithLock(frame_number);
6236 pthread_mutex_unlock(&mMutex);
6237 } else {
6238 pthread_mutex_lock(&mMutex);
6239 handleBufferWithLock(buffer, frame_number);
6240 pthread_mutex_unlock(&mMutex);
6241 }
6242 return;
6243}
6244
6245/*===========================================================================
6246 * FUNCTION : getReprocessibleOutputStreamId
6247 *
6248 * DESCRIPTION: Get source output stream id for the input reprocess stream
6249 * based on size and format, which would be the largest
6250 * output stream if an input stream exists.
6251 *
6252 * PARAMETERS :
6253 * @id : return the stream id if found
6254 *
6255 * RETURN : int32_t type of status
6256 * NO_ERROR -- success
6257 * none-zero failure code
6258 *==========================================================================*/
6259int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6260{
    /* Check if there is any output or bidirectional stream with the same size
       and format, and return that stream */
6263 if ((mInputStreamInfo.dim.width > 0) &&
6264 (mInputStreamInfo.dim.height > 0)) {
6265 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6266 it != mStreamInfo.end(); it++) {
6267
6268 camera3_stream_t *stream = (*it)->stream;
6269 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6270 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6271 (stream->format == mInputStreamInfo.format)) {
6272 // Usage flag for an input stream and the source output stream
6273 // may be different.
6274 LOGD("Found reprocessible output stream! %p", *it);
6275 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6276 stream->usage, mInputStreamInfo.usage);
6277
6278 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6279 if (channel != NULL && channel->mStreams[0]) {
6280 id = channel->mStreams[0]->getMyServerID();
6281 return NO_ERROR;
6282 }
6283 }
6284 }
6285 } else {
6286 LOGD("No input stream, so no reprocessible output stream");
6287 }
6288 return NAME_NOT_FOUND;
6289}
6290
6291/*===========================================================================
6292 * FUNCTION : lookupFwkName
6293 *
 * DESCRIPTION: In case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated.
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @hal_name : name of the HAL parameter to map
 *
 * RETURN     : int type of status
 *              fwk_name -- success
 *              non-zero failure code
6305 *==========================================================================*/
6306template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6307 size_t len, halType hal_name)
6308{
6309
6310 for (size_t i = 0; i < len; i++) {
6311 if (arr[i].hal_name == hal_name) {
6312 return arr[i].fwk_name;
6313 }
6314 }
6315
    /* Not finding a matching framework type is not necessarily an error.
     * This happens when mm-camera supports more attributes than the
     * framework does. */
6319 LOGH("Cannot find matching framework type");
6320 return NAME_NOT_FOUND;
6321}
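// Illustrative usage sketch (not a HAL code path; the map name below is assumed
// for illustration). A backend enum is translated to its framework counterpart
// with lookupFwkName, and the result is only used when a mapping exists:
//
//     int val = lookupFwkName(EFFECT_MODES_MAP,
//             METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
//     if (val != NAME_NOT_FOUND) {
//         uint8_t fwkEffectMode = (uint8_t)val;
//         camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwkEffectMode, 1);
//     }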
6322
6323/*===========================================================================
6324 * FUNCTION : lookupHalName
6325 *
 * DESCRIPTION: In case the enum is not the same in the framework and the
 *              backend, make sure the parameter is correctly propagated.
 *
 * PARAMETERS  :
 *   @arr      : map between the two enums
 *   @len      : length of the map
 *   @fwk_name : name of the framework parameter to map
 *
 * RETURN     : int32_t type of status
 *              hal_name -- success
 *              non-zero failure code
6337 *==========================================================================*/
6338template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6339 size_t len, fwkType fwk_name)
6340{
6341 for (size_t i = 0; i < len; i++) {
6342 if (arr[i].fwk_name == fwk_name) {
6343 return arr[i].hal_name;
6344 }
6345 }
6346
6347 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6348 return NAME_NOT_FOUND;
6349}
6350
6351/*===========================================================================
6352 * FUNCTION : lookupProp
6353 *
6354 * DESCRIPTION: lookup a value by its name
6355 *
6356 * PARAMETERS :
6357 * @arr : map between the two enums
6358 * @len : size of the map
6359 * @name : name to be looked up
6360 *
6361 * RETURN : Value if found
6362 * CAM_CDS_MODE_MAX if not found
6363 *==========================================================================*/
6364template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6365 size_t len, const char *name)
6366{
6367 if (name) {
6368 for (size_t i = 0; i < len; i++) {
6369 if (!strcmp(arr[i].desc, name)) {
6370 return arr[i].val;
6371 }
6372 }
6373 }
6374 return CAM_CDS_MODE_MAX;
6375}
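// Illustrative usage sketch (assumption; the property and map names are for
// illustration only). lookupProp translates a property string, e.g. one read via
// property_get(), into the corresponding CDS mode, falling back to
// CAM_CDS_MODE_MAX when the string is unknown:
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds_mode =
//             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);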
6376
6377/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate the metadata received from the HAL/backend into the
 *              camera_metadata_t format expected by the framework.
6380 *
6381 * PARAMETERS :
6382 * @metadata : metadata information from callback
6383 * @timestamp: metadata buffer timestamp
6384 * @request_id: request id
6385 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006386 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006387 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6388 * // DevCamDebug metadata end
 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006390 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6391 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006392 *
6393 * RETURN : camera_metadata_t*
6394 * metadata in a format specified by fwk
6395 *==========================================================================*/
6396camera_metadata_t*
6397QCamera3HardwareInterface::translateFromHalMetadata(
6398 metadata_buffer_t *metadata,
6399 nsecs_t timestamp,
6400 int32_t request_id,
6401 const CameraMetadata& jpegMetadata,
6402 uint8_t pipeline_depth,
6403 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006404 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006405 /* DevCamDebug metadata translateFromHalMetadata argument */
6406 uint8_t DevCamDebug_meta_enable,
6407 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006408 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006409 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006410 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006411{
6412 CameraMetadata camMetadata;
6413 camera_metadata_t *resultMetadata;
6414
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006415 if (!lastMetadataInBatch) {
        /* In batch mode, for a metadata buffer that is not the last one in the
         * batch, only populate SENSOR_TIMESTAMP; the timestamp is needed for the
         * shutter notify calculation. */
6419 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6420 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006421 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006422 }
6423
Thierry Strudel3d639192016-09-09 11:52:26 -07006424 if (jpegMetadata.entryCount())
6425 camMetadata.append(jpegMetadata);
6426
6427 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6428 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6429 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6430 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006431 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006432 if (mBatchSize == 0) {
6433 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6434 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6435 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006436
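    // The translation blocks below share a common pattern: IF_META_AVAILABLE()
    // checks whether a given tag is present in the backend metadata buffer and,
    // when it is, the value is copied into the framework CameraMetadata via
    // camMetadata.update().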
Samuel Ha68ba5172016-12-15 18:41:12 -08006437 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6439 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6440 // DevCamDebug metadata translateFromHalMetadata AF
6441 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6442 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6443 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6444 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6445 }
6446 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6447 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6448 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6449 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6450 }
6451 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6452 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6453 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6454 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6455 }
6456 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6457 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6458 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6459 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6460 }
6461 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6462 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6463 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6464 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6465 }
6466 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6467 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6468 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6469 *DevCamDebug_af_monitor_pdaf_target_pos;
6470 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6471 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6472 }
6473 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6474 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6475 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6476 *DevCamDebug_af_monitor_pdaf_confidence;
6477 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6478 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6479 }
6480 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6481 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6482 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6483 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6484 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6485 }
6486 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6487 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6488 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6489 *DevCamDebug_af_monitor_tof_target_pos;
6490 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6491 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6492 }
6493 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6494 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6495 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6496 *DevCamDebug_af_monitor_tof_confidence;
6497 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6498 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6499 }
6500 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6501 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6502 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6503 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6504 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6505 }
6506 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6507 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6508 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6509 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6510 &fwk_DevCamDebug_af_monitor_type_select, 1);
6511 }
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6513 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6514 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6515 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6516 &fwk_DevCamDebug_af_monitor_refocus, 1);
6517 }
6518 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6519 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6520 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6521 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6522 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6523 }
6524 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6525 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6526 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6527 *DevCamDebug_af_search_pdaf_target_pos;
6528 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6529 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6530 }
6531 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6532 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6533 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6534 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6535 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6536 }
6537 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6538 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6539 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6540 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6541 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6542 }
6543 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6544 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6545 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6546 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6547 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6548 }
6549 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6550 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6551 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6552 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6553 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6554 }
6555 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6556 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6557 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6558 *DevCamDebug_af_search_tof_target_pos;
6559 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6560 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6561 }
6562 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6563 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6564 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6565 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6566 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6567 }
6568 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6569 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6570 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6571 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6572 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6573 }
6574 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6575 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6576 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6577 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6578 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6579 }
6580 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6581 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6582 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6583 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6584 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6585 }
6586 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6587 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6588 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6589 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6590 &fwk_DevCamDebug_af_search_type_select, 1);
6591 }
6592 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6593 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6594 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6595 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6596 &fwk_DevCamDebug_af_search_next_pos, 1);
6597 }
6598 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6599 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6600 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6601 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6602 &fwk_DevCamDebug_af_search_target_pos, 1);
6603 }
6604 // DevCamDebug metadata translateFromHalMetadata AEC
6605 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6606 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6607 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6608 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6609 }
6610 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6611 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6612 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6613 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6614 }
6615 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6616 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6617 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6618 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6619 }
6620 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6621 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6622 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6623 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6624 }
6625 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6626 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6627 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6628 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6629 }
6630 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6631 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6632 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6633 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6634 }
6635 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6636 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6637 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6638 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6639 }
6640 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6641 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6642 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6643 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6644 }
Samuel Ha34229982017-02-17 13:51:11 -08006645 // DevCamDebug metadata translateFromHalMetadata zzHDR
6646 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6647 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6648 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6649 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6652 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006653 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006654 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6655 }
6656 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6657 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6658 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6659 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6662 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006663 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006664 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6665 }
6666 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6667 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6668 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6669 *DevCamDebug_aec_hdr_sensitivity_ratio;
6670 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6671 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6672 }
6673 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6674 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6675 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6676 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6677 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6678 }
6679 // DevCamDebug metadata translateFromHalMetadata ADRC
6680 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6681 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6682 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6683 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6684 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6685 }
6686 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6687 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6688 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6689 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6690 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6691 }
6692 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6693 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6694 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6695 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6696 }
6697 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6698 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6699 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6700 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6701 }
6702 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6703 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6704 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6705 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6706 }
6707 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6708 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6709 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6710 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6711 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006712 // DevCamDebug metadata translateFromHalMetadata AWB
6713 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6714 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6715 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6716 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6717 }
6718 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6719 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6720 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6721 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6722 }
6723 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6724 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6725 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6726 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6727 }
6728 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6729 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6730 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6731 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6732 }
6733 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6734 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6735 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6736 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6737 }
6738 }
6739 // atrace_end(ATRACE_TAG_ALWAYS);
6740
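// The HAL frame number is republished to the framework as ANDROID_SYNC_FRAME_NUMBER,
// widened from uint32_t to the int64_t type the framework tag expects.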
Thierry Strudel3d639192016-09-09 11:52:26 -07006741 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6742 int64_t fwk_frame_number = *frame_number;
6743 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6744 }
6745
6746 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6747 int32_t fps_range[2];
6748 fps_range[0] = (int32_t)float_range->min_fps;
6749 fps_range[1] = (int32_t)float_range->max_fps;
6750 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6751 fps_range, 2);
6752 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6753 fps_range[0], fps_range[1]);
6754 }
6755
6756 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6757 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6758 }
6759
6760 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6761 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6762 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6763 *sceneMode);
6764 if (NAME_NOT_FOUND != val) {
6765 uint8_t fwkSceneMode = (uint8_t)val;
6766 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6767 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6768 fwkSceneMode);
6769 }
6770 }
6771
6772 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6773 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6774 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6775 }
6776
6777 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6778 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6779 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6780 }
6781
6782 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6783 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6784 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6785 }
6786
6787 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6788 CAM_INTF_META_EDGE_MODE, metadata) {
6789 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6790 }
6791
6792 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6793 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6794 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6795 }
6796
6797 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6798 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6799 }
6800
6801 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6802 if (0 <= *flashState) {
6803 uint8_t fwk_flashState = (uint8_t) *flashState;
6804 if (!gCamCapability[mCameraId]->flash_available) {
6805 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6806 }
6807 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6808 }
6809 }
6810
6811 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6812 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6813 if (NAME_NOT_FOUND != val) {
6814 uint8_t fwk_flashMode = (uint8_t)val;
6815 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6816 }
6817 }
6818
6819 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6820 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6821 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6822 }
6823
6824 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6825 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6826 }
6827
6828 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6829 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6830 }
6831
6832 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6833 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6834 }
6835
6836 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6837 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6838 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6839 }
6840
6841 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6842 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6843 LOGD("fwk_videoStab = %d", fwk_videoStab);
6844 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6845 } else {
6846 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6847 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6848 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6849 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006850 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006851 }
6852
6853 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6854 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6855 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6856 }
6857
6858 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6859 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6860 }
6861
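// Dynamic black level: adjustBlackLevelForCFA() reorders the per-channel black level
// values according to the sensor's CFA color arrangement before they are published via
// the QCamera vendor tag. When not building for HAL 3.3, the same values are also
// reported as ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL after dividing by 16 to convert the
// 14-bit internal pipeline depth to the 10-bit sensor raw depth (e.g. 1024 -> 64).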
Thierry Strudel3d639192016-09-09 11:52:26 -07006862 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6863 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006864 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006865
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006866 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6867 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006868
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006869 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006870 blackLevelAppliedPattern->cam_black_level[0],
6871 blackLevelAppliedPattern->cam_black_level[1],
6872 blackLevelAppliedPattern->cam_black_level[2],
6873 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006874 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6875 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006876
6877#ifndef USE_HAL_3_3
6878 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306879 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006880 // depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05306881 fwk_blackLevelInd[0] /= 16.0;
6882 fwk_blackLevelInd[1] /= 16.0;
6883 fwk_blackLevelInd[2] /= 16.0;
6884 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006885 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6886 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006887#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006888 }
6889
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006890#ifndef USE_HAL_3_3
6891 // Fixed whitelevel is used by ISP/Sensor
6892 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6893 &gCamCapability[mCameraId]->white_level, 1);
6894#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006895
6896 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6897 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6898 int32_t scalerCropRegion[4];
6899 scalerCropRegion[0] = hScalerCropRegion->left;
6900 scalerCropRegion[1] = hScalerCropRegion->top;
6901 scalerCropRegion[2] = hScalerCropRegion->width;
6902 scalerCropRegion[3] = hScalerCropRegion->height;
6903
6904 // Adjust crop region from sensor output coordinate system to active
6905 // array coordinate system.
6906 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6907 scalerCropRegion[2], scalerCropRegion[3]);
6908
6909 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6910 }
6911
6912 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6913 LOGD("sensorExpTime = %lld", *sensorExpTime);
6914 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6915 }
6916
6917 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6918 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6919 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6920 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6921 }
6922
6923 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6924 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6925 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6926 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6927 sensorRollingShutterSkew, 1);
6928 }
6929
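// Sensitivity also drives ANDROID_SENSOR_NOISE_PROFILE below: one (S, O) pair is
// emitted per color channel, where S and O are derived from the reported sensitivity
// and parameterize the sensor noise model (noise variance grows roughly linearly with
// signal level), e.g. a 4-channel Bayer sensor yields {S, O, S, O, S, O, S, O}.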
6930 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6931 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6932 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6933
6934 //calculate the noise profile based on sensitivity
6935 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6936 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6937 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6938 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6939 noise_profile[i] = noise_profile_S;
6940 noise_profile[i+1] = noise_profile_O;
6941 }
6942 LOGD("noise model entry (S, O) is (%f, %f)",
6943 noise_profile_S, noise_profile_O);
6944 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6945 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6946 }
6947
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006948#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006949 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006950 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006951 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006952 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006953 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6954 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6955 }
6956 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006957#endif
6958
Thierry Strudel3d639192016-09-09 11:52:26 -07006959 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6960 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6961 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6962 }
6963
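// Face detection results: face scores and rectangles are published whenever detection
// is enabled, landmarks only in FULL mode, and blink/smile/gaze statistics through the
// QCAMERA3_STATS_* vendor tags whenever the corresponding metadata is present. All
// coordinates are mapped from the sensor output to the active array coordinate system.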
6964 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6965 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6966 *faceDetectMode);
6967 if (NAME_NOT_FOUND != val) {
6968 uint8_t fwk_faceDetectMode = (uint8_t)val;
6969 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6970
6971 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6972 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6973 CAM_INTF_META_FACE_DETECTION, metadata) {
6974 uint8_t numFaces = MIN(
6975 faceDetectionInfo->num_faces_detected, MAX_ROI);
6976 int32_t faceIds[MAX_ROI];
6977 uint8_t faceScores[MAX_ROI];
6978 int32_t faceRectangles[MAX_ROI * 4];
6979 int32_t faceLandmarks[MAX_ROI * 6];
6980 size_t j = 0, k = 0;
6981
6982 for (size_t i = 0; i < numFaces; i++) {
6983 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6984 // Map the face boundary from the sensor output coordinate system to the
6985 // active array coordinate system.
6986 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6987 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6988 rect.width, rect.height);
6989
6990 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6991 faceRectangles+j, -1);
6992
6993 j+= 4;
6994 }
6995 if (numFaces <= 0) {
6996 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6997 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6998 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6999 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7000 }
7001
7002 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7003 numFaces);
7004 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7005 faceRectangles, numFaces * 4U);
7006 if (fwk_faceDetectMode ==
7007 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7008 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7009 CAM_INTF_META_FACE_LANDMARK, metadata) {
7010
7011 for (size_t i = 0; i < numFaces; i++) {
7012 // Map the landmark coordinates from the sensor output coordinate system
7013 // to the active array coordinate system.
7014 mCropRegionMapper.toActiveArray(
7015 landmarks->face_landmarks[i].left_eye_center.x,
7016 landmarks->face_landmarks[i].left_eye_center.y);
7017 mCropRegionMapper.toActiveArray(
7018 landmarks->face_landmarks[i].right_eye_center.x,
7019 landmarks->face_landmarks[i].right_eye_center.y);
7020 mCropRegionMapper.toActiveArray(
7021 landmarks->face_landmarks[i].mouth_center.x,
7022 landmarks->face_landmarks[i].mouth_center.y);
7023
7024 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007025 k+= TOTAL_LANDMARK_INDICES;
7026 }
7027 } else {
7028 for (size_t i = 0; i < numFaces; i++) {
7029 setInvalidLandmarks(faceLandmarks+k);
7030 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007031 }
7032 }
7033
7034 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7035 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7036 faceLandmarks, numFaces * 6U);
7037 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007038 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7039 CAM_INTF_META_FACE_BLINK, metadata) {
7040 uint8_t detected[MAX_ROI];
7041 uint8_t degree[MAX_ROI * 2];
7042 for (size_t i = 0; i < numFaces; i++) {
7043 detected[i] = blinks->blink[i].blink_detected;
7044 degree[2 * i] = blinks->blink[i].left_blink;
7045 degree[2 * i + 1] = blinks->blink[i].right_blink;
7046 }
7047 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7048 detected, numFaces);
7049 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7050 degree, numFaces * 2);
7051 }
7052 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7053 CAM_INTF_META_FACE_SMILE, metadata) {
7054 uint8_t degree[MAX_ROI];
7055 uint8_t confidence[MAX_ROI];
7056 for (size_t i = 0; i < numFaces; i++) {
7057 degree[i] = smiles->smile[i].smile_degree;
7058 confidence[i] = smiles->smile[i].smile_confidence;
7059 }
7060 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7061 degree, numFaces);
7062 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7063 confidence, numFaces);
7064 }
7065 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7066 CAM_INTF_META_FACE_GAZE, metadata) {
7067 int8_t angle[MAX_ROI];
7068 int32_t direction[MAX_ROI * 3];
7069 int8_t degree[MAX_ROI * 2];
7070 for (size_t i = 0; i < numFaces; i++) {
7071 angle[i] = gazes->gaze[i].gaze_angle;
7072 direction[3 * i] = gazes->gaze[i].updown_dir;
7073 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7074 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7075 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7076 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7077 }
7078 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7079 (uint8_t *)angle, numFaces);
7080 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7081 direction, numFaces * 3);
7082 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7083 (uint8_t *)degree, numFaces * 2);
7084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007085 }
7086 }
7087 }
7088 }
7089
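// Histogram statistics are forwarded through the NEXUS_EXPERIMENTAL_2017_* vendor tags;
// for Bayer stats only one channel's buffer is published, selected by the switch below.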
7090 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7091 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007092 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007093 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007094 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007095
Shuzhen Wang14415f52016-11-16 18:26:18 -08007096 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7097 histogramBins = *histBins;
7098 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7099 }
7100
7101 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007102 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7103 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007104 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007105
7106 switch (stats_data->type) {
7107 case CAM_HISTOGRAM_TYPE_BAYER:
7108 switch (stats_data->bayer_stats.data_type) {
7109 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007110 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7111 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007112 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007113 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7114 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007115 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007116 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7117 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007118 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007119 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007120 case CAM_STATS_CHANNEL_R:
7121 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007122 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7123 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007124 }
7125 break;
7126 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007127 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007128 break;
7129 }
7130
Shuzhen Wang14415f52016-11-16 18:26:18 -08007131 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 }
7133 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007134 }
7135
7136 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7137 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7138 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7139 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7140 }
7141
7142 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7143 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7144 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7145 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7146 }
7147
7148 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7149 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7150 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7151 CAM_MAX_SHADING_MAP_HEIGHT);
7152 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7153 CAM_MAX_SHADING_MAP_WIDTH);
7154 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7155 lensShadingMap->lens_shading, 4U * map_width * map_height);
7156 }
7157
7158 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7159 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7160 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7161 }
7162
7163 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7164 // Populate ANDROID_TONEMAP_CURVE_* from CAM_INTF_META_TONEMAP_CURVES
7165 /* ch0 = G, ch1 = B, ch2 = R */
7166 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7167 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7168 tonemap->tonemap_points_cnt,
7169 CAM_MAX_TONEMAP_CURVE_SIZE);
7170 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7171 }
7172
7173 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7174 &tonemap->curves[0].tonemap_points[0][0],
7175 tonemap->tonemap_points_cnt * 2);
7176
7177 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7178 &tonemap->curves[1].tonemap_points[0][0],
7179 tonemap->tonemap_points_cnt * 2);
7180
7181 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7182 &tonemap->curves[2].tonemap_points[0][0],
7183 tonemap->tonemap_points_cnt * 2);
7184 }
7185
7186 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7187 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7188 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7189 CC_GAIN_MAX);
7190 }
7191
7192 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7193 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7194 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7195 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7196 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7197 }
7198
7199 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7200 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7201 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7202 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7203 toneCurve->tonemap_points_cnt,
7204 CAM_MAX_TONEMAP_CURVE_SIZE);
7205 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7206 }
7207 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7208 (float*)toneCurve->curve.tonemap_points,
7209 toneCurve->tonemap_points_cnt * 2);
7210 }
7211
7212 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7213 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7214 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7215 predColorCorrectionGains->gains, 4);
7216 }
7217
7218 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7219 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7220 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7221 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7222 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7223 }
7224
7225 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7226 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7227 }
7228
7229 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7230 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7231 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7232 }
7233
7234 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7235 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7236 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7237 }
7238
7239 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7240 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7241 *effectMode);
7242 if (NAME_NOT_FOUND != val) {
7243 uint8_t fwk_effectMode = (uint8_t)val;
7244 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7245 }
7246 }
7247
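// Test pattern data is reported as [R, Gr, Gb, B] for RGGB/GRBG sensors; the two
// green slots are swapped below for GBRG/BGGR color arrangements.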
7248 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7249 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7250 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7251 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7252 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7253 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7254 }
7255 int32_t fwk_testPatternData[4];
7256 fwk_testPatternData[0] = testPatternData->r;
7257 fwk_testPatternData[3] = testPatternData->b;
7258 switch (gCamCapability[mCameraId]->color_arrangement) {
7259 case CAM_FILTER_ARRANGEMENT_RGGB:
7260 case CAM_FILTER_ARRANGEMENT_GRBG:
7261 fwk_testPatternData[1] = testPatternData->gr;
7262 fwk_testPatternData[2] = testPatternData->gb;
7263 break;
7264 case CAM_FILTER_ARRANGEMENT_GBRG:
7265 case CAM_FILTER_ARRANGEMENT_BGGR:
7266 fwk_testPatternData[2] = testPatternData->gr;
7267 fwk_testPatternData[1] = testPatternData->gb;
7268 break;
7269 default:
7270 LOGE("color arrangement %d is not supported",
7271 gCamCapability[mCameraId]->color_arrangement);
7272 break;
7273 }
7274 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7275 }
7276
7277 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7278 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7279 }
7280
7281 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7282 String8 str((const char *)gps_methods);
7283 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7284 }
7285
7286 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7287 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7288 }
7289
7290 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7291 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7292 }
7293
7294 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7295 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7296 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7297 }
7298
7299 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7300 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7301 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7302 }
7303
7304 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7305 int32_t fwk_thumb_size[2];
7306 fwk_thumb_size[0] = thumb_size->width;
7307 fwk_thumb_size[1] = thumb_size->height;
7308 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7309 }
7310
7311 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7312 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7313 privateData,
7314 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7315 }
7316
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007317 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007318 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007319 meteringMode, 1);
7320 }
7321
Thierry Strudel54dc9782017-02-15 12:12:10 -08007322 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7323 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7324 LOGD("hdr_scene_data: %d %f\n",
7325 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7326 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7327 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7328 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7329 &isHdr, 1);
7330 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7331 &isHdrConfidence, 1);
7332 }
7333
7334
7335
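// Tuning data is serialized into QCAMERA3_TUNING_META_DATA_BLOB with the layout:
// version, then the sensor/VFE/CPP/CAC/mod3 section sizes (one uint32_t each, mod3
// forced to 0), followed by the sensor, VFE, CPP and CAC data sections, each clamped
// to its TUNING_*_DATA_MAX size.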
Thierry Strudel3d639192016-09-09 11:52:26 -07007336 if (metadata->is_tuning_params_valid) {
7337 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7338 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7339 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7340
7341
7342 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7343 sizeof(uint32_t));
7344 data += sizeof(uint32_t);
7345
7346 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7347 sizeof(uint32_t));
7348 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7349 data += sizeof(uint32_t);
7350
7351 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7352 sizeof(uint32_t));
7353 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7354 data += sizeof(uint32_t);
7355
7356 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7357 sizeof(uint32_t));
7358 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7359 data += sizeof(uint32_t);
7360
7361 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7362 sizeof(uint32_t));
7363 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7364 data += sizeof(uint32_t);
7365
7366 metadata->tuning_params.tuning_mod3_data_size = 0;
7367 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7368 sizeof(uint32_t));
7369 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7370 data += sizeof(uint32_t);
7371
7372 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7373 TUNING_SENSOR_DATA_MAX);
7374 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7375 count);
7376 data += count;
7377
7378 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7379 TUNING_VFE_DATA_MAX);
7380 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7381 count);
7382 data += count;
7383
7384 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7385 TUNING_CPP_DATA_MAX);
7386 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7387 count);
7388 data += count;
7389
7390 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7391 TUNING_CAC_DATA_MAX);
7392 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7393 count);
7394 data += count;
7395
7396 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7397 (int32_t *)(void *)tuning_meta_data_blob,
7398 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7399 }
7400
7401 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7402 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7403 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7404 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7405 NEUTRAL_COL_POINTS);
7406 }
7407
7408 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7409 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7410 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7411 }
7412
7413 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7414 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7415 // Map the AE region from the sensor output coordinate system to the
7416 // active array coordinate system.
7417 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7418 hAeRegions->rect.width, hAeRegions->rect.height);
7419
7420 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7421 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7422 REGIONS_TUPLE_COUNT);
7423 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7424 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7425 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7426 hAeRegions->rect.height);
7427 }
7428
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007429 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7430 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7431 if (NAME_NOT_FOUND != val) {
7432 uint8_t fwkAfMode = (uint8_t)val;
7433 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7434 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7435 } else {
7436 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7437 val);
7438 }
7439 }
7440
Thierry Strudel3d639192016-09-09 11:52:26 -07007441 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7442 uint8_t fwk_afState = (uint8_t) *afState;
7443 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007444 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007445 }
7446
7447 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7448 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7449 }
7450
7451 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7452 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7453 }
7454
7455 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7456 uint8_t fwk_lensState = *lensState;
7457 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7458 }
7459
Thierry Strudel3d639192016-09-09 11:52:26 -07007460
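// CAM_ANTIBANDING_MODE_AUTO_50HZ/60HZ are HAL-internal refinements of AUTO; fold them
// back to plain AUTO before mapping to the framework antibanding enum.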
7461 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007462 uint32_t ab_mode = *hal_ab_mode;
7463 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7464 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7465 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7466 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007467 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007468 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007469 if (NAME_NOT_FOUND != val) {
7470 uint8_t fwk_ab_mode = (uint8_t)val;
7471 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7472 }
7473 }
7474
7475 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7476 int val = lookupFwkName(SCENE_MODES_MAP,
7477 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7478 if (NAME_NOT_FOUND != val) {
7479 uint8_t fwkBestshotMode = (uint8_t)val;
7480 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7481 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7482 } else {
7483 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7484 }
7485 }
7486
7487 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7488 uint8_t fwk_mode = (uint8_t) *mode;
7489 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7490 }
7491
7492 /* Constant metadata values to be updated */
7493 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7494 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7495
7496 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7497 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7498
7499 int32_t hotPixelMap[2];
7500 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7501
7502 // CDS
7503 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7504 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7505 }
7506
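// Video HDR: any sensor HDR mode other than OFF is reported as QCAMERA3_VIDEO_HDR_MODE_ON,
// and mCurrFeatureState is updated so ON/OFF transitions are logged (PROFILE_META_HDR_TOGGLED).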
Thierry Strudel04e026f2016-10-10 11:27:36 -07007507 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7508 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007509 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007510 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7511 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7512 } else {
7513 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7514 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007515
7516 if(fwk_hdr != curr_hdr_state) {
7517 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7518 if(fwk_hdr)
7519 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7520 else
7521 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7522 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007523 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7524 }
7525
Thierry Strudel54dc9782017-02-15 12:12:10 -08007526 //binning correction
7527 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7528 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7529 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7530 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7531 }
7532
Thierry Strudel04e026f2016-10-10 11:27:36 -07007533 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007534 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007535 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7536 int8_t is_ir_on = 0;
7537
7538 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7539 if(is_ir_on != curr_ir_state) {
7540 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7541 if(is_ir_on)
7542 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7543 else
7544 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7545 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007546 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007547 }
7548
Thierry Strudel269c81a2016-10-12 12:13:59 -07007549 // AEC SPEED
7550 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7551 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7552 }
7553
7554 // AWB SPEED
7555 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7556 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7557 }
7558
Thierry Strudel3d639192016-09-09 11:52:26 -07007559 // TNR
7560 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7561 uint8_t tnr_enable = tnr->denoise_enable;
7562 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007563 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7564 int8_t is_tnr_on = 0;
7565
7566 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7567 if(is_tnr_on != curr_tnr_state) {
7568 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7569 if(is_tnr_on)
7570 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7571 else
7572 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7573 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007574
7575 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7576 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7577 }
7578
7579 // Reprocess crop data
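// For the reprocessible output stream, publish either the full input dimensions (when the
// HAL has already performed its internal post-processing) or the daemon-reported crop,
// along with the ROI map, through the QCAMERA3_CROP_* reprocess vendor tags.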
7580 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7581 uint8_t cnt = crop_data->num_of_streams;
7582 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7583 // mm-qcamera-daemon only posts crop_data for streams that are
7584 // not linked to pproc, so the absence of valid crop metadata is
7585 // not necessarily an error.
7586 LOGD("No valid crop metadata entries");
7587 } else {
7588 uint32_t reproc_stream_id;
7589 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7590 LOGD("No reprocessible stream found, ignore crop data");
7591 } else {
7592 int rc = NO_ERROR;
7593 Vector<int32_t> roi_map;
7594 int32_t *crop = new int32_t[cnt*4];
7595 if (NULL == crop) {
7596 rc = NO_MEMORY;
7597 }
7598 if (NO_ERROR == rc) {
7599 int32_t streams_found = 0;
7600 for (size_t i = 0; i < cnt; i++) {
7601 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7602 if (pprocDone) {
7603 // The HAL already does internal reprocessing, either via
7604 // reprocessing before JPEG encoding or via offline
7605 // postprocessing for the pproc bypass case.
7606 crop[0] = 0;
7607 crop[1] = 0;
7608 crop[2] = mInputStreamInfo.dim.width;
7609 crop[3] = mInputStreamInfo.dim.height;
7610 } else {
7611 crop[0] = crop_data->crop_info[i].crop.left;
7612 crop[1] = crop_data->crop_info[i].crop.top;
7613 crop[2] = crop_data->crop_info[i].crop.width;
7614 crop[3] = crop_data->crop_info[i].crop.height;
7615 }
7616 roi_map.add(crop_data->crop_info[i].roi_map.left);
7617 roi_map.add(crop_data->crop_info[i].roi_map.top);
7618 roi_map.add(crop_data->crop_info[i].roi_map.width);
7619 roi_map.add(crop_data->crop_info[i].roi_map.height);
7620 streams_found++;
7621 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7622 crop[0], crop[1], crop[2], crop[3]);
7623 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7624 crop_data->crop_info[i].roi_map.left,
7625 crop_data->crop_info[i].roi_map.top,
7626 crop_data->crop_info[i].roi_map.width,
7627 crop_data->crop_info[i].roi_map.height);
7628 break;
7629
7630 }
7631 }
7632 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7633 &streams_found, 1);
7634 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7635 crop, (size_t)(streams_found * 4));
7636 if (roi_map.array()) {
7637 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7638 roi_map.array(), roi_map.size());
7639 }
7640 }
7641 if (crop) {
7642 delete [] crop;
7643 }
7644 }
7645 }
7646 }
7647
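// CAC: when aberration modes are supported, the HAL-reported CAC mode is reconciled with
// the mode requested by the framework (fwk_cacMode) and forced to OFF if CAC has been
// disabled via property.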
7648 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7649 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7650 // so hardcode the CAC result to OFF mode.
7651 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7652 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7653 } else {
7654 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7655 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7656 *cacMode);
7657 if (NAME_NOT_FOUND != val) {
7658 uint8_t resultCacMode = (uint8_t)val;
7659 // check whether CAC result from CB is equal to Framework set CAC mode
7660 // If not equal then set the CAC mode came in corresponding request
7661 if (fwk_cacMode != resultCacMode) {
7662 resultCacMode = fwk_cacMode;
7663 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007664 //Check if CAC is disabled by property
7665 if (m_cacModeDisabled) {
7666 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7667 }
7668
Thierry Strudel3d639192016-09-09 11:52:26 -07007669 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7670 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7671 } else {
7672 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7673 }
7674 }
7675 }
7676
7677 // Post blob of cam_cds_data through vendor tag.
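// Only the CDS entry that matches the reprocessible output stream is forwarded; it is
// copied into a single-stream cam_cds_data_t override before being posted.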
7678 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7679 uint8_t cnt = cdsInfo->num_of_streams;
7680 cam_cds_data_t cdsDataOverride;
7681 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7682 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7683 cdsDataOverride.num_of_streams = 1;
7684 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7685 uint32_t reproc_stream_id;
7686 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7687 LOGD("No reprocessible stream found, ignore cds data");
7688 } else {
7689 for (size_t i = 0; i < cnt; i++) {
7690 if (cdsInfo->cds_info[i].stream_id ==
7691 reproc_stream_id) {
7692 cdsDataOverride.cds_info[0].cds_enable =
7693 cdsInfo->cds_info[i].cds_enable;
7694 break;
7695 }
7696 }
7697 }
7698 } else {
7699 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7700 }
7701 camMetadata.update(QCAMERA3_CDS_INFO,
7702 (uint8_t *)&cdsDataOverride,
7703 sizeof(cam_cds_data_t));
7704 }
7705
7706 // Ldaf calibration data
7707 if (!mLdafCalibExist) {
7708 IF_META_AVAILABLE(uint32_t, ldafCalib,
7709 CAM_INTF_META_LDAF_EXIF, metadata) {
7710 mLdafCalibExist = true;
7711 mLdafCalib[0] = ldafCalib[0];
7712 mLdafCalib[1] = ldafCalib[1];
7713 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7714 ldafCalib[0], ldafCalib[1]);
7715 }
7716 }
7717
Thierry Strudel54dc9782017-02-15 12:12:10 -08007718 // EXIF debug data through vendor tag
7719 /*
7720 * Mobicat Mask can assume 3 values:
7721 * 1 refers to Mobicat data,
7722 * 2 refers to Stats Debug and Exif Debug Data
7723 * 3 refers to Mobicat and Stats Debug Data
7724 * We want to make sure that we are sending Exif debug data
7725 * only when Mobicat Mask is 2.
7726 */
7727 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7728 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7729 (uint8_t *)(void *)mExifParams.debug_params,
7730 sizeof(mm_jpeg_debug_exif_params_t));
7731 }
7732
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007733 // Reprocess and DDM debug data through vendor tag
7734 cam_reprocess_info_t repro_info;
7735 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007736 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7737 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007738 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007739 }
7740 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7741 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007742 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007743 }
7744 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7745 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007746 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007747 }
7748 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7749 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007750 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007751 }
7752 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7753 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007754 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007755 }
7756 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007757 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007758 }
7759 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7760 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007761 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007762 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007763 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7764 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7765 }
7766 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7767 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7768 }
7769 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7770 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007771
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007772 // INSTANT AEC MODE
7773 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7774 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7775 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7776 }
7777
Shuzhen Wange763e802016-03-31 10:24:29 -07007778 // AF scene change
7779 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7780 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7781 }
7782
Thierry Strudel3d639192016-09-09 11:52:26 -07007783 resultMetadata = camMetadata.release();
7784 return resultMetadata;
7785}
7786
7787/*===========================================================================
7788 * FUNCTION : saveExifParams
7789 *
7790 * DESCRIPTION: Save 3A/stats EXIF debug parameters from the metadata callback into mExifParams.debug_params for later use (e.g. the EXIF debug data blob).
7791 *
7792 * PARAMETERS :
7793 * @metadata : metadata information from callback
7794 *
7795 * RETURN : none
7796 *
7797 *==========================================================================*/
7798void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7799{
7800 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7801 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7802 if (mExifParams.debug_params) {
7803 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7804 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7805 }
7806 }
7807 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7808 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7809 if (mExifParams.debug_params) {
7810 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7811 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7812 }
7813 }
7814 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7815 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7816 if (mExifParams.debug_params) {
7817 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7818 mExifParams.debug_params->af_debug_params_valid = TRUE;
7819 }
7820 }
7821 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7822 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7823 if (mExifParams.debug_params) {
7824 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7825 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7826 }
7827 }
7828 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7829 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7830 if (mExifParams.debug_params) {
7831 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7832 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7833 }
7834 }
7835 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7836 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7837 if (mExifParams.debug_params) {
7838 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7839 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7840 }
7841 }
7842 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7843 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7844 if (mExifParams.debug_params) {
7845 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7846 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7847 }
7848 }
7849 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7850 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7851 if (mExifParams.debug_params) {
7852 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7853 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7854 }
7855 }
7856}
7857
7858/*===========================================================================
7859 * FUNCTION : get3AExifParams
7860 *
7861 * DESCRIPTION:
7862 *
7863 * PARAMETERS : none
7864 *
7865 *
7866 * RETURN : mm_jpeg_exif_params_t
7867 *
7868 *==========================================================================*/
7869mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7870{
7871 return mExifParams;
7872}
7873
7874/*===========================================================================
7875 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7876 *
7877 * DESCRIPTION:
7878 *
7879 * PARAMETERS :
7880 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007881 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7882 * urgent metadata in a batch. Always true for
7883 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007884 *
7885 * RETURN : camera_metadata_t*
7886 * metadata in a format specified by fwk
7887 *==========================================================================*/
7888camera_metadata_t*
7889QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007890 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07007891{
7892 CameraMetadata camMetadata;
7893 camera_metadata_t *resultMetadata;
7894
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007895 if (!lastUrgentMetadataInBatch) {
7896 /* In batch mode, use empty metadata if this is not the last in batch
7897 */
7898 resultMetadata = allocate_camera_metadata(0, 0);
7899 return resultMetadata;
7900 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007901
7902 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7903 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7904 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7905 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7906 }
7907
7908 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7909 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7910 &aecTrigger->trigger, 1);
7911 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7912 &aecTrigger->trigger_id, 1);
7913 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7914 aecTrigger->trigger);
7915 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7916 aecTrigger->trigger_id);
7917 }
7918
7919 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7920 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7921 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7922 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7923 }
7924
Thierry Strudel3d639192016-09-09 11:52:26 -07007925 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7926 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7927 &af_trigger->trigger, 1);
7928 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7929 af_trigger->trigger);
7930 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7931 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7932 af_trigger->trigger_id);
7933 }
7934
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07007935 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7936 /*af regions*/
7937 int32_t afRegions[REGIONS_TUPLE_COUNT];
7938 // Adjust crop region from sensor output coordinate system to active
7939 // array coordinate system.
7940 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7941 hAfRegions->rect.width, hAfRegions->rect.height);
7942
7943 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7944 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7945 REGIONS_TUPLE_COUNT);
7946 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7947 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7948 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7949 hAfRegions->rect.height);
7950 }
7951
Shuzhen Wangcc386c52017-03-29 09:28:08 -07007952 // AF region confidence
7953 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
7954 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
7955 }
7956
Thierry Strudel3d639192016-09-09 11:52:26 -07007957 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7958 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7959 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7960 if (NAME_NOT_FOUND != val) {
7961 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7962 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7963 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7964 } else {
7965 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7966 }
7967 }
7968
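    // Derive ANDROID_CONTROL_AE_MODE from the backend AE mode, LED/flash mode
    // and red-eye reduction setting below: red-eye reduction takes precedence,
    // then an explicit flash mode, then the plain AE on/off state.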
7969 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7970 uint32_t aeMode = CAM_AE_MODE_MAX;
7971 int32_t flashMode = CAM_FLASH_MODE_MAX;
7972 int32_t redeye = -1;
7973 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7974 aeMode = *pAeMode;
7975 }
7976 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7977 flashMode = *pFlashMode;
7978 }
7979 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7980 redeye = *pRedeye;
7981 }
7982
7983 if (1 == redeye) {
7984 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7985 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7986 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7987 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7988 flashMode);
7989 if (NAME_NOT_FOUND != val) {
7990 fwk_aeMode = (uint8_t)val;
7991 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7992 } else {
7993 LOGE("Unsupported flash mode %d", flashMode);
7994 }
7995 } else if (aeMode == CAM_AE_MODE_ON) {
7996 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7997 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7998 } else if (aeMode == CAM_AE_MODE_OFF) {
7999 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8000 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8001 } else {
8002 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8003 "flashMode:%d, aeMode:%u!!!",
8004 redeye, flashMode, aeMode);
8005 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008006 if (mInstantAEC) {
8007        // Increment frame index count until a bound is reached for instant AEC.
8008 mInstantAecFrameIdxCount++;
8009 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8010 CAM_INTF_META_AEC_INFO, metadata) {
8011 LOGH("ae_params->settled = %d",ae_params->settled);
8012 // If AEC settled, or if number of frames reached bound value,
8013 // should reset instant AEC.
8014 if (ae_params->settled ||
8015 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8016 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8017 mInstantAEC = false;
8018 mResetInstantAEC = true;
8019 mInstantAecFrameIdxCount = 0;
8020 }
8021 }
8022 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008023 resultMetadata = camMetadata.release();
8024 return resultMetadata;
8025}
8026
8027/*===========================================================================
8028 * FUNCTION : dumpMetadataToFile
8029 *
8030 * DESCRIPTION: Dumps tuning metadata to file system
8031 *
8032 * PARAMETERS :
8033 * @meta : tuning metadata
8034 * @dumpFrameCount : current dump frame count
8035 * @enabled : Enable mask
8036 * @type : string label used in the dump file name
 * @frameNumber : current frame number
 *
8037 *==========================================================================*/
8038void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8039 uint32_t &dumpFrameCount,
8040 bool enabled,
8041 const char *type,
8042 uint32_t frameNumber)
8043{
8044 //Some sanity checks
8045 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8046 LOGE("Tuning sensor data size bigger than expected %d: %d",
8047 meta.tuning_sensor_data_size,
8048 TUNING_SENSOR_DATA_MAX);
8049 return;
8050 }
8051
8052 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8053 LOGE("Tuning VFE data size bigger than expected %d: %d",
8054 meta.tuning_vfe_data_size,
8055 TUNING_VFE_DATA_MAX);
8056 return;
8057 }
8058
8059 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8060 LOGE("Tuning CPP data size bigger than expected %d: %d",
8061 meta.tuning_cpp_data_size,
8062 TUNING_CPP_DATA_MAX);
8063 return;
8064 }
8065
8066 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8067 LOGE("Tuning CAC data size bigger than expected %d: %d",
8068 meta.tuning_cac_data_size,
8069 TUNING_CAC_DATA_MAX);
8070 return;
8071 }
8072 //
8073
8074 if(enabled){
8075 char timeBuf[FILENAME_MAX];
8076 char buf[FILENAME_MAX];
8077 memset(buf, 0, sizeof(buf));
8078 memset(timeBuf, 0, sizeof(timeBuf));
8079 time_t current_time;
8080 struct tm * timeinfo;
8081 time (&current_time);
8082 timeinfo = localtime (&current_time);
8083 if (timeinfo != NULL) {
8084 strftime (timeBuf, sizeof(timeBuf),
8085 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8086 }
8087 String8 filePath(timeBuf);
8088 snprintf(buf,
8089 sizeof(buf),
8090 "%dm_%s_%d.bin",
8091 dumpFrameCount,
8092 type,
8093 frameNumber);
8094 filePath.append(buf);
8095 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8096 if (file_fd >= 0) {
8097 ssize_t written_len = 0;
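            // Dump file layout: tuning data version word, five size words
            // (sensor/VFE/CPP/CAC/mod3), followed by the sensor, VFE, CPP and
            // CAC data blocks copied from their fixed offsets in meta.data.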
8098 meta.tuning_data_version = TUNING_DATA_VERSION;
8099 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8100 written_len += write(file_fd, data, sizeof(uint32_t));
8101 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8102 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8103 written_len += write(file_fd, data, sizeof(uint32_t));
8104 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8105 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8106 written_len += write(file_fd, data, sizeof(uint32_t));
8107 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8108 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8109 written_len += write(file_fd, data, sizeof(uint32_t));
8110 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8111 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8112 written_len += write(file_fd, data, sizeof(uint32_t));
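            // mod3 data is not dumped; record a zero size for it.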
8113 meta.tuning_mod3_data_size = 0;
8114 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8115 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8116 written_len += write(file_fd, data, sizeof(uint32_t));
8117 size_t total_size = meta.tuning_sensor_data_size;
8118 data = (void *)((uint8_t *)&meta.data);
8119 written_len += write(file_fd, data, total_size);
8120 total_size = meta.tuning_vfe_data_size;
8121 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8122 written_len += write(file_fd, data, total_size);
8123 total_size = meta.tuning_cpp_data_size;
8124 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8125 written_len += write(file_fd, data, total_size);
8126 total_size = meta.tuning_cac_data_size;
8127 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8128 written_len += write(file_fd, data, total_size);
8129 close(file_fd);
8130 }else {
8131 LOGE("fail to open file for metadata dumping");
8132 }
8133 }
8134}
8135
8136/*===========================================================================
8137 * FUNCTION : cleanAndSortStreamInfo
8138 *
8139 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8140 *              and sort them such that raw streams are at the end of the list.
8141 *              This is a workaround for a camera daemon constraint.
8142 *
8143 * PARAMETERS : None
8144 *
8145 *==========================================================================*/
8146void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8147{
8148 List<stream_info_t *> newStreamInfo;
8149
8150 /*clean up invalid streams*/
8151 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8152 it != mStreamInfo.end();) {
8153 if(((*it)->status) == INVALID){
8154 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8155 delete channel;
8156 free(*it);
8157 it = mStreamInfo.erase(it);
8158 } else {
8159 it++;
8160 }
8161 }
8162
8163 // Move preview/video/callback/snapshot streams into newList
8164 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8165 it != mStreamInfo.end();) {
8166 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8167 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8168 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8169 newStreamInfo.push_back(*it);
8170 it = mStreamInfo.erase(it);
8171 } else
8172 it++;
8173 }
8174 // Move raw streams into newList
8175 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8176 it != mStreamInfo.end();) {
8177 newStreamInfo.push_back(*it);
8178 it = mStreamInfo.erase(it);
8179 }
8180
8181 mStreamInfo = newStreamInfo;
8182}
8183
8184/*===========================================================================
8185 * FUNCTION : extractJpegMetadata
8186 *
8187 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8188 *              JPEG metadata is cached in HAL, and returned as part of the capture
8189 * result when metadata is returned from camera daemon.
8190 *
8191 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8192 * @request: capture request
8193 *
8194 *==========================================================================*/
8195void QCamera3HardwareInterface::extractJpegMetadata(
8196 CameraMetadata& jpegMetadata,
8197 const camera3_capture_request_t *request)
8198{
8199 CameraMetadata frame_settings;
8200 frame_settings = request->settings;
8201
8202 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8203 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8204 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8205 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8206
8207 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8208 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8209 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8210 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8211
8212 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8213 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8214 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8215 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8216
8217 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8218 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8219 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8220 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8221
8222 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8223 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8224 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8225 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8226
8227 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8228 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8229 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8230 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8231
8232 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8233 int32_t thumbnail_size[2];
8234 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8235 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8236 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8237 int32_t orientation =
8238 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008239 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008240 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8241 int32_t temp;
8242 temp = thumbnail_size[0];
8243 thumbnail_size[0] = thumbnail_size[1];
8244 thumbnail_size[1] = temp;
8245 }
8246 }
8247 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8248 thumbnail_size,
8249 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8250 }
8251
8252}
8253
8254/*===========================================================================
8255 * FUNCTION : convertToRegions
8256 *
8257 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8258 *
8259 * PARAMETERS :
8260 * @rect : cam_rect_t struct to convert
8261 * @region : int32_t destination array
8262 * @weight : if we are converting from cam_area_t, weight is valid
8263 * else weight = -1
8264 *
8265 *==========================================================================*/
8266void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8267 int32_t *region, int weight)
8268{
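    // Framework region tuples are laid out as (xMin, yMin, xMax, yMax[, weight]).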
8269 region[0] = rect.left;
8270 region[1] = rect.top;
8271 region[2] = rect.left + rect.width;
8272 region[3] = rect.top + rect.height;
8273 if (weight > -1) {
8274 region[4] = weight;
8275 }
8276}
8277
8278/*===========================================================================
8279 * FUNCTION : convertFromRegions
8280 *
8281 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8282 *
8283 * PARAMETERS :
8284 *   @roi            : cam_area_t destination struct
8285 *   @frame_settings : capture request settings containing the region tag
8286 *   @tag            : metadata tag whose (xMin, yMin, xMax, yMax, weight)
8287 *                     entry is converted
8288 *
8289 *==========================================================================*/
8290void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008291 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008292{
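    // The framework supplies (xMin, yMin, xMax, yMax, weight); convert it to
    // the backend's left/top/width/height representation.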
Thierry Strudel3d639192016-09-09 11:52:26 -07008293 int32_t x_min = frame_settings.find(tag).data.i32[0];
8294 int32_t y_min = frame_settings.find(tag).data.i32[1];
8295 int32_t x_max = frame_settings.find(tag).data.i32[2];
8296 int32_t y_max = frame_settings.find(tag).data.i32[3];
8297 roi.weight = frame_settings.find(tag).data.i32[4];
8298 roi.rect.left = x_min;
8299 roi.rect.top = y_min;
8300 roi.rect.width = x_max - x_min;
8301 roi.rect.height = y_max - y_min;
8302}
8303
8304/*===========================================================================
8305 * FUNCTION : resetIfNeededROI
8306 *
8307 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8308 * crop region
8309 *
8310 * PARAMETERS :
8311 * @roi : cam_area_t struct to resize
8312 * @scalerCropRegion : cam_crop_region_t region to compare against
8313 *
8314 * RETURN     : true if the ROI is disabled (weight 0) or was clamped into the
 *              scaler crop region; false if it lies completely outside of it
8315 *==========================================================================*/
8316bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8317 const cam_crop_region_t* scalerCropRegion)
8318{
8319 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8320 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8321 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8322 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8323
8324    /* According to the spec, weight = 0 indicates the ROI should be disabled.
8325     * Without this check, the validation below (whether the ROI lies inside the
8326     * scaler crop region) would fail, the ROI would not be reset, and the
8327     * algorithm would keep using a stale ROI window.
8328 */
8329 if (roi->weight == 0) {
8330 return true;
8331 }
8332
8333 if ((roi_x_max < scalerCropRegion->left) ||
8334 // right edge of roi window is left of scalar crop's left edge
8335 (roi_y_max < scalerCropRegion->top) ||
8336 // bottom edge of roi window is above scalar crop's top edge
8337 (roi->rect.left > crop_x_max) ||
8338 // left edge of roi window is beyond(right) of scalar crop's right edge
8339 (roi->rect.top > crop_y_max)){
8340            // top edge of roi window is below scaler crop's bottom edge
8341 return false;
8342 }
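    // ROI overlaps the scaler crop region; clamp its edges to the crop bounds.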
8343 if (roi->rect.left < scalerCropRegion->left) {
8344 roi->rect.left = scalerCropRegion->left;
8345 }
8346 if (roi->rect.top < scalerCropRegion->top) {
8347 roi->rect.top = scalerCropRegion->top;
8348 }
8349 if (roi_x_max > crop_x_max) {
8350 roi_x_max = crop_x_max;
8351 }
8352 if (roi_y_max > crop_y_max) {
8353 roi_y_max = crop_y_max;
8354 }
8355 roi->rect.width = roi_x_max - roi->rect.left;
8356 roi->rect.height = roi_y_max - roi->rect.top;
8357 return true;
8358}
8359
8360/*===========================================================================
8361 * FUNCTION : convertLandmarks
8362 *
8363 * DESCRIPTION: helper method to extract the landmarks from face detection info
8364 *
8365 * PARAMETERS :
8366 * @landmark_data : input landmark data to be converted
8367 * @landmarks : int32_t destination array
8368 *
8369 *
8370 *==========================================================================*/
8371void QCamera3HardwareInterface::convertLandmarks(
8372 cam_face_landmarks_info_t landmark_data,
8373 int32_t *landmarks)
8374{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008375 if (landmark_data.is_left_eye_valid) {
8376 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8377 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8378 } else {
8379 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8380 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8381 }
8382
8383 if (landmark_data.is_right_eye_valid) {
8384 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8385 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8386 } else {
8387 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8388 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8389 }
8390
8391 if (landmark_data.is_mouth_valid) {
8392 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8393 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8394 } else {
8395 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8396 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8397 }
8398}
8399
8400/*===========================================================================
8401 * FUNCTION : setInvalidLandmarks
8402 *
8403 * DESCRIPTION: helper method to set invalid landmarks
8404 *
8405 * PARAMETERS :
8406 * @landmarks : int32_t destination array
8407 *
8408 *
8409 *==========================================================================*/
8410void QCamera3HardwareInterface::setInvalidLandmarks(
8411 int32_t *landmarks)
8412{
8413 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8414 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8415 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8416 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8417 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8418 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008419}
8420
8421#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008422
8423/*===========================================================================
8424 * FUNCTION : getCapabilities
8425 *
8426 * DESCRIPTION: query camera capability from back-end
8427 *
8428 * PARAMETERS :
8429 * @ops : mm-interface ops structure
8430 * @cam_handle : camera handle for which we need capability
8431 *
8432 * RETURN : ptr type of capability structure
8433 * capability for success
8434 * NULL for failure
8435 *==========================================================================*/
8436cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8437 uint32_t cam_handle)
8438{
8439 int rc = NO_ERROR;
8440 QCamera3HeapMemory *capabilityHeap = NULL;
8441 cam_capability_t *cap_ptr = NULL;
8442
8443 if (ops == NULL) {
8444 LOGE("Invalid arguments");
8445 return NULL;
8446 }
8447
8448 capabilityHeap = new QCamera3HeapMemory(1);
8449 if (capabilityHeap == NULL) {
8450 LOGE("creation of capabilityHeap failed");
8451 return NULL;
8452 }
8453
8454 /* Allocate memory for capability buffer */
8455 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8456 if(rc != OK) {
8457 LOGE("No memory for cappability");
8458        LOGE("No memory for capability");
8459 }
8460
8461 /* Map memory for capability buffer */
8462 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8463
8464 rc = ops->map_buf(cam_handle,
8465 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8466 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8467 if(rc < 0) {
8468 LOGE("failed to map capability buffer");
8469 rc = FAILED_TRANSACTION;
8470 goto map_failed;
8471 }
8472
8473 /* Query Capability */
8474 rc = ops->query_capability(cam_handle);
8475 if(rc < 0) {
8476 LOGE("failed to query capability");
8477 rc = FAILED_TRANSACTION;
8478 goto query_failed;
8479 }
8480
8481 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8482 if (cap_ptr == NULL) {
8483 LOGE("out of memory");
8484 rc = NO_MEMORY;
8485 goto query_failed;
8486 }
8487
8488 memset(cap_ptr, 0, sizeof(cam_capability_t));
8489 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8490
8491 int index;
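    // Clear the analysis stream padding offsets in the local copy of the
    // capabilities before returning it to the caller.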
8492 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8493 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8494 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8495 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8496 }
8497
8498query_failed:
8499 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8500map_failed:
8501 capabilityHeap->deallocate();
8502allocate_failed:
8503 delete capabilityHeap;
8504
8505 if (rc != NO_ERROR) {
8506 return NULL;
8507 } else {
8508 return cap_ptr;
8509 }
8510}
8511
Thierry Strudel3d639192016-09-09 11:52:26 -07008512/*===========================================================================
8513 * FUNCTION : initCapabilities
8514 *
8515 * DESCRIPTION: initialize camera capabilities in static data struct
8516 *
8517 * PARAMETERS :
8518 * @cameraId : camera Id
8519 *
8520 * RETURN : int32_t type of status
8521 * NO_ERROR -- success
8522 * none-zero failure code
8523 *              non-zero failure code
8524int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8525{
8526 int rc = 0;
8527 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008528 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008529
8530 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8531 if (rc) {
8532 LOGE("camera_open failed. rc = %d", rc);
8533 goto open_failed;
8534 }
8535 if (!cameraHandle) {
8536 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8537 goto open_failed;
8538 }
8539
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008540 handle = get_main_camera_handle(cameraHandle->camera_handle);
8541 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8542 if (gCamCapability[cameraId] == NULL) {
8543 rc = FAILED_TRANSACTION;
8544 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008545 }
8546
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008547 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008548 if (is_dual_camera_by_idx(cameraId)) {
8549 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8550 gCamCapability[cameraId]->aux_cam_cap =
8551 getCapabilities(cameraHandle->ops, handle);
8552 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8553 rc = FAILED_TRANSACTION;
8554 free(gCamCapability[cameraId]);
8555 goto failed_op;
8556 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008557
8558 // Copy the main camera capability to main_cam_cap struct
8559 gCamCapability[cameraId]->main_cam_cap =
8560 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8561 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8562 LOGE("out of memory");
8563 rc = NO_MEMORY;
8564 goto failed_op;
8565 }
8566 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8567 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008568 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008569failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008570 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8571 cameraHandle = NULL;
8572open_failed:
8573 return rc;
8574}
8575
8576/*==========================================================================
8577 * FUNCTION : get3Aversion
8578 * FUNCTION   : get3AVersion
8579 * DESCRIPTION: get the Q3A S/W version
8580 *
8581 * PARAMETERS :
8582 * @sw_version: Reference of Q3A structure which will hold version info upon
8583 * return
8584 *
8585 * RETURN : None
8586 *
8587 *==========================================================================*/
8588void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8589{
8590 if(gCamCapability[mCameraId])
8591 sw_version = gCamCapability[mCameraId]->q3a_version;
8592 else
8593 LOGE("Capability structure NULL!");
8594}
8595
8596
8597/*===========================================================================
8598 * FUNCTION : initParameters
8599 *
8600 * DESCRIPTION: initialize camera parameters
8601 *
8602 * PARAMETERS :
8603 *
8604 * RETURN : int32_t type of status
8605 * NO_ERROR -- success
8606 *              non-zero failure code
8607 *==========================================================================*/
8608int QCamera3HardwareInterface::initParameters()
8609{
8610 int rc = 0;
8611
8612 //Allocate Set Param Buffer
8613 mParamHeap = new QCamera3HeapMemory(1);
8614 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8615 if(rc != OK) {
8616 rc = NO_MEMORY;
8617 LOGE("Failed to allocate SETPARM Heap memory");
8618 delete mParamHeap;
8619 mParamHeap = NULL;
8620 return rc;
8621 }
8622
8623 //Map memory for parameters buffer
8624 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8625 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8626 mParamHeap->getFd(0),
8627 sizeof(metadata_buffer_t),
8628 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8629 if(rc < 0) {
8630 LOGE("failed to map SETPARM buffer");
8631 rc = FAILED_TRANSACTION;
8632 mParamHeap->deallocate();
8633 delete mParamHeap;
8634 mParamHeap = NULL;
8635 return rc;
8636 }
8637
8638 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8639
8640 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8641 return rc;
8642}
8643
8644/*===========================================================================
8645 * FUNCTION : deinitParameters
8646 *
8647 * DESCRIPTION: de-initialize camera parameters
8648 *
8649 * PARAMETERS :
8650 *
8651 * RETURN : NONE
8652 *==========================================================================*/
8653void QCamera3HardwareInterface::deinitParameters()
8654{
8655 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8656 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8657
8658 mParamHeap->deallocate();
8659 delete mParamHeap;
8660 mParamHeap = NULL;
8661
8662 mParameters = NULL;
8663
8664 free(mPrevParameters);
8665 mPrevParameters = NULL;
8666}
8667
8668/*===========================================================================
8669 * FUNCTION : calcMaxJpegSize
8670 *
8671 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8672 *
8673 * PARAMETERS :
8674 *   @camera_id : camera Id
 *
8675 * RETURN : max_jpeg_size
8676 *==========================================================================*/
8677size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8678{
8679 size_t max_jpeg_size = 0;
8680 size_t temp_width, temp_height;
8681 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8682 MAX_SIZES_CNT);
8683 for (size_t i = 0; i < count; i++) {
8684 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8685 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8686 if (temp_width * temp_height > max_jpeg_size ) {
8687 max_jpeg_size = temp_width * temp_height;
8688 }
8689 }
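    // Estimate the worst-case JPEG size as 1.5x the largest supported pixel
    // count plus the size of the camera3_jpeg_blob_t transport header.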
8690 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8691 return max_jpeg_size;
8692}
8693
8694/*===========================================================================
8695 * FUNCTION : getMaxRawSize
8696 *
8697 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8698 *
8699 * PARAMETERS :
8700 *   @camera_id : camera Id
 *
8701 * RETURN : Largest supported Raw Dimension
8702 *==========================================================================*/
8703cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8704{
8705 int max_width = 0;
8706 cam_dimension_t maxRawSize;
8707
8708 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8709 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8710 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8711 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8712 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8713 }
8714 }
8715 return maxRawSize;
8716}
8717
8718
8719/*===========================================================================
8720 * FUNCTION : calcMaxJpegDim
8721 *
8722 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8723 *
8724 * PARAMETERS :
8725 *
8726 * RETURN : max_jpeg_dim
8727 *==========================================================================*/
8728cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8729{
8730 cam_dimension_t max_jpeg_dim;
8731 cam_dimension_t curr_jpeg_dim;
8732 max_jpeg_dim.width = 0;
8733 max_jpeg_dim.height = 0;
8734 curr_jpeg_dim.width = 0;
8735 curr_jpeg_dim.height = 0;
8736 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8737 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8738 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8739 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8740 max_jpeg_dim.width * max_jpeg_dim.height ) {
8741 max_jpeg_dim.width = curr_jpeg_dim.width;
8742 max_jpeg_dim.height = curr_jpeg_dim.height;
8743 }
8744 }
8745 return max_jpeg_dim;
8746}
8747
8748/*===========================================================================
8749 * FUNCTION : addStreamConfig
8750 *
8751 * DESCRIPTION: adds the stream configuration to the array
8752 *
8753 * PARAMETERS :
8754 * @available_stream_configs : pointer to stream configuration array
8755 * @scalar_format : scalar format
8756 * @dim : configuration dimension
8757 * @config_type : input or output configuration type
8758 *
8759 * RETURN : NONE
8760 *==========================================================================*/
8761void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8762 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8763{
8764 available_stream_configs.add(scalar_format);
8765 available_stream_configs.add(dim.width);
8766 available_stream_configs.add(dim.height);
8767 available_stream_configs.add(config_type);
8768}
8769
8770/*===========================================================================
8771 * FUNCTION : suppportBurstCapture
8772 * FUNCTION   : supportBurstCapture
8773 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8774 *
8775 * PARAMETERS :
8776 * @cameraId : camera Id
8777 *
8778 * RETURN : true if camera supports BURST_CAPTURE
8779 * false otherwise
8780 *==========================================================================*/
8781bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8782{
8783 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8784 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8785 const int32_t highResWidth = 3264;
8786 const int32_t highResHeight = 2448;
8787
8788 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8789 // Maximum resolution images cannot be captured at >= 10fps
8790 // -> not supporting BURST_CAPTURE
8791 return false;
8792 }
8793
8794 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8795 // Maximum resolution images can be captured at >= 20fps
8796 // --> supporting BURST_CAPTURE
8797 return true;
8798 }
8799
8800 // Find the smallest highRes resolution, or largest resolution if there is none
8801 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8802 MAX_SIZES_CNT);
8803 size_t highRes = 0;
8804 while ((highRes + 1 < totalCnt) &&
8805 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8806 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8807 highResWidth * highResHeight)) {
8808 highRes++;
8809 }
8810 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8811 return true;
8812 } else {
8813 return false;
8814 }
8815}
8816
8817/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008818 * FUNCTION : getPDStatIndex
8819 *
8820 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8821 *
8822 * PARAMETERS :
8823 * @caps : camera capabilities
8824 *
8825 * RETURN : int32_t type
8826 * non-negative - on success
8827 * -1 - on failure
8828 *==========================================================================*/
8829int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8830 if (nullptr == caps) {
8831 return -1;
8832 }
8833
8834 uint32_t metaRawCount = caps->meta_raw_channel_count;
8835 int32_t ret = -1;
8836 for (size_t i = 0; i < metaRawCount; i++) {
8837 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8838 ret = i;
8839 break;
8840 }
8841 }
8842
8843 return ret;
8844}
8845
8846/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008847 * FUNCTION : initStaticMetadata
8848 *
8849 * DESCRIPTION: initialize the static metadata
8850 *
8851 * PARAMETERS :
8852 * @cameraId : camera Id
8853 *
8854 * RETURN : int32_t type of status
8855 * 0 -- success
8856 * non-zero failure code
8857 *==========================================================================*/
8858int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8859{
8860 int rc = 0;
8861 CameraMetadata staticInfo;
8862 size_t count = 0;
8863 bool limitedDevice = false;
8864 char prop[PROPERTY_VALUE_MAX];
8865 bool supportBurst = false;
8866
8867 supportBurst = supportBurstCapture(cameraId);
8868
8869 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8870 * guaranteed or if min fps of max resolution is less than 20 fps, its
8871     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
8872     * advertised as a limited device. */
8873 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8874 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8875 !supportBurst;
8876
8877 uint8_t supportedHwLvl = limitedDevice ?
8878 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008879#ifndef USE_HAL_3_3
8880 // LEVEL_3 - This device will support level 3.
8881 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8882#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008883 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008884#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008885
8886 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8887 &supportedHwLvl, 1);
8888
8889 bool facingBack = false;
8890 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8891 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8892 facingBack = true;
8893 }
8894 /*HAL 3 only*/
8895 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8896 &gCamCapability[cameraId]->min_focus_distance, 1);
8897
8898 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8899 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8900
8901 /*should be using focal lengths but sensor doesn't provide that info now*/
8902 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8903 &gCamCapability[cameraId]->focal_length,
8904 1);
8905
8906 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8907 gCamCapability[cameraId]->apertures,
8908 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8909
8910 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8911 gCamCapability[cameraId]->filter_densities,
8912 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8913
8914
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008915 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8916 size_t mode_count =
8917 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8918 for (size_t i = 0; i < mode_count; i++) {
8919 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8920 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008921 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008922 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008923
8924 int32_t lens_shading_map_size[] = {
8925 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8926 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8927 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8928 lens_shading_map_size,
8929 sizeof(lens_shading_map_size)/sizeof(int32_t));
8930
8931 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8932 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8933
8934 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8935 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8936
8937 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8938 &gCamCapability[cameraId]->max_frame_duration, 1);
8939
8940 camera_metadata_rational baseGainFactor = {
8941 gCamCapability[cameraId]->base_gain_factor.numerator,
8942 gCamCapability[cameraId]->base_gain_factor.denominator};
8943 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8944 &baseGainFactor, 1);
8945
8946 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8947 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8948
8949 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8950 gCamCapability[cameraId]->pixel_array_size.height};
8951 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8952 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8953
8954 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8955 gCamCapability[cameraId]->active_array_size.top,
8956 gCamCapability[cameraId]->active_array_size.width,
8957 gCamCapability[cameraId]->active_array_size.height};
8958 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8959 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8960
8961 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8962 &gCamCapability[cameraId]->white_level, 1);
8963
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008964 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8965 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8966 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008967 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008968 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008969
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008970#ifndef USE_HAL_3_3
8971 bool hasBlackRegions = false;
8972 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8973 LOGW("black_region_count: %d is bounded to %d",
8974 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8975 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8976 }
8977 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8978 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8979 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8980 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8981 }
8982 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8983 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8984 hasBlackRegions = true;
8985 }
8986#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008987 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8988 &gCamCapability[cameraId]->flash_charge_duration, 1);
8989
8990 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8991 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8992
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008993 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8994 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8995 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008996 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8997 &timestampSource, 1);
8998
Thierry Strudel54dc9782017-02-15 12:12:10 -08008999 //update histogram vendor data
9000 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009001 &gCamCapability[cameraId]->histogram_size, 1);
9002
Thierry Strudel54dc9782017-02-15 12:12:10 -08009003 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009004 &gCamCapability[cameraId]->max_histogram_count, 1);
9005
Shuzhen Wang14415f52016-11-16 18:26:18 -08009006 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9007    //so that the app can request fewer bins than the maximum supported.
9008 std::vector<int32_t> histBins;
9009 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9010 histBins.push_back(maxHistBins);
9011 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9012 (maxHistBins & 0x1) == 0) {
9013 histBins.push_back(maxHistBins >> 1);
9014 maxHistBins >>= 1;
9015 }
9016 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9017 histBins.data(), histBins.size());
9018
Thierry Strudel3d639192016-09-09 11:52:26 -07009019 int32_t sharpness_map_size[] = {
9020 gCamCapability[cameraId]->sharpness_map_size.width,
9021 gCamCapability[cameraId]->sharpness_map_size.height};
9022
9023 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9024 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9025
9026 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9027 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9028
Emilian Peev0f3c3162017-03-15 12:57:46 +00009029 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9030 if (0 <= indexPD) {
9031 // Advertise PD stats data as part of the Depth capabilities
9032 int32_t depthWidth =
9033 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9034 int32_t depthHeight =
9035 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9036 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9037 assert(0 < depthSamplesCount);
9038 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9039 &depthSamplesCount, 1);
9040
9041 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9042 depthHeight,
9043 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9044 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9045 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9046 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9047 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9048
9049 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9050 depthHeight, 33333333,
9051 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9052 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9053 depthMinDuration,
9054 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9055
9056 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9057 depthHeight, 0,
9058 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9059 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9060 depthStallDuration,
9061 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9062
9063 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9064 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9065 }
9066
Thierry Strudel3d639192016-09-09 11:52:26 -07009067 int32_t scalar_formats[] = {
9068 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9069 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9070 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9071 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9072 HAL_PIXEL_FORMAT_RAW10,
9073 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009074 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9075 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9076 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009077
9078 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9079 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9080 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9081 count, MAX_SIZES_CNT, available_processed_sizes);
9082 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9083 available_processed_sizes, count * 2);
9084
9085 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9086 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9087 makeTable(gCamCapability[cameraId]->raw_dim,
9088 count, MAX_SIZES_CNT, available_raw_sizes);
9089 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9090 available_raw_sizes, count * 2);
9091
9092 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9093 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9094 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9095 count, MAX_SIZES_CNT, available_fps_ranges);
9096 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9097 available_fps_ranges, count * 2);
9098
9099 camera_metadata_rational exposureCompensationStep = {
9100 gCamCapability[cameraId]->exp_compensation_step.numerator,
9101 gCamCapability[cameraId]->exp_compensation_step.denominator};
9102 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9103 &exposureCompensationStep, 1);
9104
9105 Vector<uint8_t> availableVstabModes;
9106 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9107 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009108 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009109 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009110 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009111 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
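    // EIS video stabilization is advertised only for the back camera, when the
    // persist.camera.eis.enable property is set and the sensor supports
    // EIS 2.0 or EIS 3.0.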
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009112 count = IS_TYPE_MAX;
9113 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9114 for (size_t i = 0; i < count; i++) {
9115 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9116 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9117 eisSupported = true;
9118 break;
9119 }
9120 }
9121 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009122 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9123 }
9124 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9125 availableVstabModes.array(), availableVstabModes.size());
9126
9127 /*HAL 1 and HAL 3 common*/
9128 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9129 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9130 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009131 // Cap the max zoom to the max preferred value
9132 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009133 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9134 &maxZoom, 1);
9135
9136 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9137 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9138
9139 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9140 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9141 max3aRegions[2] = 0; /* AF not supported */
9142 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9143 max3aRegions, 3);
9144
9145 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9146 memset(prop, 0, sizeof(prop));
9147 property_get("persist.camera.facedetect", prop, "1");
9148 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9149 LOGD("Support face detection mode: %d",
9150 supportedFaceDetectMode);
9151
9152 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009153    /* supported face detect mode should be OFF if the max number of faces is 0 */
9154 if (maxFaces <= 0) {
9155 supportedFaceDetectMode = 0;
9156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009157 Vector<uint8_t> availableFaceDetectModes;
9158 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9159 if (supportedFaceDetectMode == 1) {
9160 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9161 } else if (supportedFaceDetectMode == 2) {
9162 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9163 } else if (supportedFaceDetectMode == 3) {
9164 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9165 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9166 } else {
9167 maxFaces = 0;
9168 }
9169 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9170 availableFaceDetectModes.array(),
9171 availableFaceDetectModes.size());
9172 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9173 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009174 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9175 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9176 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009177
9178 int32_t exposureCompensationRange[] = {
9179 gCamCapability[cameraId]->exposure_compensation_min,
9180 gCamCapability[cameraId]->exposure_compensation_max};
9181 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9182 exposureCompensationRange,
9183 sizeof(exposureCompensationRange)/sizeof(int32_t));
9184
9185 uint8_t lensFacing = (facingBack) ?
9186 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9187 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9188
9189 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9190 available_thumbnail_sizes,
9191 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9192
9193 /*all sizes will be clubbed into this tag*/
9194 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9195 /*android.scaler.availableStreamConfigurations*/
9196 Vector<int32_t> available_stream_configs;
9197 cam_dimension_t active_array_dim;
9198 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9199 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009200
9201 /*advertise list of input dimensions supported based on below property.
9202      By default all sizes up to 5MP will be advertised.
9203 Note that the setprop resolution format should be WxH.
9204 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9205 To list all supported sizes, setprop needs to be set with "0x0" */
9206 cam_dimension_t minInputSize = {2592,1944}; //5MP
9207 memset(prop, 0, sizeof(prop));
9208 property_get("persist.camera.input.minsize", prop, "2592x1944");
9209 if (strlen(prop) > 0) {
9210 char *saveptr = NULL;
9211 char *token = strtok_r(prop, "x", &saveptr);
9212 if (token != NULL) {
9213 minInputSize.width = atoi(token);
9214 }
9215 token = strtok_r(NULL, "x", &saveptr);
9216 if (token != NULL) {
9217 minInputSize.height = atoi(token);
9218 }
9219 }
9220
Thierry Strudel3d639192016-09-09 11:52:26 -07009221    /* Add input/output stream configurations for each scalar format */
9222 for (size_t j = 0; j < scalar_formats_count; j++) {
9223 switch (scalar_formats[j]) {
9224 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9225 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9226 case HAL_PIXEL_FORMAT_RAW10:
9227 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9228 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9229 addStreamConfig(available_stream_configs, scalar_formats[j],
9230 gCamCapability[cameraId]->raw_dim[i],
9231 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9232 }
9233 break;
9234 case HAL_PIXEL_FORMAT_BLOB:
9235 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9236 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9237 addStreamConfig(available_stream_configs, scalar_formats[j],
9238 gCamCapability[cameraId]->picture_sizes_tbl[i],
9239 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9240 }
9241 break;
9242 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9243 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9244 default:
9245 cam_dimension_t largest_picture_size;
9246 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9247 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9248 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9249 addStreamConfig(available_stream_configs, scalar_formats[j],
9250 gCamCapability[cameraId]->picture_sizes_tbl[i],
9251 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009252                /* For the two formats below we also support input (reprocess) streams; advertise those */
9253 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9254 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9255 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9256 >= minInputSize.width) || (gCamCapability[cameraId]->
9257 picture_sizes_tbl[i].height >= minInputSize.height)) {
9258 addStreamConfig(available_stream_configs, scalar_formats[j],
9259 gCamCapability[cameraId]->picture_sizes_tbl[i],
9260 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9261 }
9262 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009263 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009264
Thierry Strudel3d639192016-09-09 11:52:26 -07009265 break;
9266 }
9267 }
9268
9269 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9270 available_stream_configs.array(), available_stream_configs.size());
9271 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9272 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9273
9274 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9275 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9276
9277 /* android.scaler.availableMinFrameDurations */
9278 Vector<int64_t> available_min_durations;
9279 for (size_t j = 0; j < scalar_formats_count; j++) {
9280 switch (scalar_formats[j]) {
9281 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9282 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9283 case HAL_PIXEL_FORMAT_RAW10:
9284 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9285 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9286 available_min_durations.add(scalar_formats[j]);
9287 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9288 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9289 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9290 }
9291 break;
9292 default:
9293 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9294 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9295 available_min_durations.add(scalar_formats[j]);
9296 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9297 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9298 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9299 }
9300 break;
9301 }
9302 }
9303 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9304 available_min_durations.array(), available_min_durations.size());
9305
9306 Vector<int32_t> available_hfr_configs;
9307 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9308 int32_t fps = 0;
9309 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9310 case CAM_HFR_MODE_60FPS:
9311 fps = 60;
9312 break;
9313 case CAM_HFR_MODE_90FPS:
9314 fps = 90;
9315 break;
9316 case CAM_HFR_MODE_120FPS:
9317 fps = 120;
9318 break;
9319 case CAM_HFR_MODE_150FPS:
9320 fps = 150;
9321 break;
9322 case CAM_HFR_MODE_180FPS:
9323 fps = 180;
9324 break;
9325 case CAM_HFR_MODE_210FPS:
9326 fps = 210;
9327 break;
9328 case CAM_HFR_MODE_240FPS:
9329 fps = 240;
9330 break;
9331 case CAM_HFR_MODE_480FPS:
9332 fps = 480;
9333 break;
9334 case CAM_HFR_MODE_OFF:
9335 case CAM_HFR_MODE_MAX:
9336 default:
9337 break;
9338 }
9339
9340 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9341 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9342 /* For each HFR frame rate, need to advertise one variable fps range
9343 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9344 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9345 * set by the app. When video recording is started, [120, 120] is
9346 * set. This way sensor configuration does not change when recording
9347 * is started */
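 /* Illustrative example (assuming PREVIEW_FPS_FOR_HFR == 30): a 1920x1080 entry at
  * 120 FPS would expand to (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4). */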
9348
9349 /* (width, height, fps_min, fps_max, batch_size_max) */
9350 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9351 j < MAX_SIZES_CNT; j++) {
9352 available_hfr_configs.add(
9353 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9354 available_hfr_configs.add(
9355 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9356 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9357 available_hfr_configs.add(fps);
9358 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9359
9360 /* (width, height, fps_min, fps_max, batch_size_max) */
9361 available_hfr_configs.add(
9362 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9363 available_hfr_configs.add(
9364 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9365 available_hfr_configs.add(fps);
9366 available_hfr_configs.add(fps);
9367 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9368 }
9369 }
9370 }
9371 //Advertise HFR capability only if the property is set
9372 memset(prop, 0, sizeof(prop));
9373 property_get("persist.camera.hal3hfr.enable", prop, "1");
9374 uint8_t hfrEnable = (uint8_t)atoi(prop);
9375
9376 if (hfrEnable && available_hfr_configs.array()) {
9377 staticInfo.update(
9378 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9379 available_hfr_configs.array(), available_hfr_configs.size());
9380 }
9381
9382 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9383 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9384 &max_jpeg_size, 1);
9385
9386 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9387 size_t size = 0;
9388 count = CAM_EFFECT_MODE_MAX;
9389 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9390 for (size_t i = 0; i < count; i++) {
9391 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9392 gCamCapability[cameraId]->supported_effects[i]);
9393 if (NAME_NOT_FOUND != val) {
9394 avail_effects[size] = (uint8_t)val;
9395 size++;
9396 }
9397 }
9398 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9399 avail_effects,
9400 size);
9401
9402 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9403 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9404 size_t supported_scene_modes_cnt = 0;
9405 count = CAM_SCENE_MODE_MAX;
9406 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9407 for (size_t i = 0; i < count; i++) {
9408 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9409 CAM_SCENE_MODE_OFF) {
9410 int val = lookupFwkName(SCENE_MODES_MAP,
9411 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9412 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009413
Thierry Strudel3d639192016-09-09 11:52:26 -07009414 if (NAME_NOT_FOUND != val) {
9415 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9416 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9417 supported_scene_modes_cnt++;
9418 }
9419 }
9420 }
9421 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9422 avail_scene_modes,
9423 supported_scene_modes_cnt);
9424
9425 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9426 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9427 supported_scene_modes_cnt,
9428 CAM_SCENE_MODE_MAX,
9429 scene_mode_overrides,
9430 supported_indexes,
9431 cameraId);
9432
9433 if (supported_scene_modes_cnt == 0) {
9434 supported_scene_modes_cnt = 1;
9435 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9436 }
9437
9438 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9439 scene_mode_overrides, supported_scene_modes_cnt * 3);
9440
9441 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9442 ANDROID_CONTROL_MODE_AUTO,
9443 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9444 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9445 available_control_modes,
9446 3);
9447
9448 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9449 size = 0;
9450 count = CAM_ANTIBANDING_MODE_MAX;
9451 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9452 for (size_t i = 0; i < count; i++) {
9453 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9454 gCamCapability[cameraId]->supported_antibandings[i]);
9455 if (NAME_NOT_FOUND != val) {
9456 avail_antibanding_modes[size] = (uint8_t)val;
9457 size++;
9458 }
9459
9460 }
9461 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9462 avail_antibanding_modes,
9463 size);
9464
9465 uint8_t avail_abberation_modes[] = {
9466 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9467 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9468 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9469 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9470 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9471 if (0 == count) {
9472 // If no aberration correction modes are available for a device, advertise only the OFF mode
9473 size = 1;
9474 } else {
9475 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9476 // So, advertise all 3 modes if at least one mode is supported, as per the
9477 // new M requirement
9478 size = 3;
9479 }
9480 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9481 avail_abberation_modes,
9482 size);
9483
9484 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9485 size = 0;
9486 count = CAM_FOCUS_MODE_MAX;
9487 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9488 for (size_t i = 0; i < count; i++) {
9489 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9490 gCamCapability[cameraId]->supported_focus_modes[i]);
9491 if (NAME_NOT_FOUND != val) {
9492 avail_af_modes[size] = (uint8_t)val;
9493 size++;
9494 }
9495 }
9496 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9497 avail_af_modes,
9498 size);
9499
9500 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9501 size = 0;
9502 count = CAM_WB_MODE_MAX;
9503 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9504 for (size_t i = 0; i < count; i++) {
9505 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9506 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9507 gCamCapability[cameraId]->supported_white_balances[i]);
9508 if (NAME_NOT_FOUND != val) {
9509 avail_awb_modes[size] = (uint8_t)val;
9510 size++;
9511 }
9512 }
9513 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9514 avail_awb_modes,
9515 size);
9516
9517 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9518 count = CAM_FLASH_FIRING_LEVEL_MAX;
9519 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9520 count);
9521 for (size_t i = 0; i < count; i++) {
9522 available_flash_levels[i] =
9523 gCamCapability[cameraId]->supported_firing_levels[i];
9524 }
9525 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9526 available_flash_levels, count);
9527
9528 uint8_t flashAvailable;
9529 if (gCamCapability[cameraId]->flash_available)
9530 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9531 else
9532 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9533 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9534 &flashAvailable, 1);
9535
9536 Vector<uint8_t> avail_ae_modes;
9537 count = CAM_AE_MODE_MAX;
9538 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9539 for (size_t i = 0; i < count; i++) {
9540 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9541 }
9542 if (flashAvailable) {
9543 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9544 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009545 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009546 }
9547 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9548 avail_ae_modes.array(),
9549 avail_ae_modes.size());
9550
9551 int32_t sensitivity_range[2];
9552 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9553 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9554 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9555 sensitivity_range,
9556 sizeof(sensitivity_range) / sizeof(int32_t));
9557
9558 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9559 &gCamCapability[cameraId]->max_analog_sensitivity,
9560 1);
9561
9562 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9563 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9564 &sensor_orientation,
9565 1);
9566
9567 int32_t max_output_streams[] = {
9568 MAX_STALLING_STREAMS,
9569 MAX_PROCESSED_STREAMS,
9570 MAX_RAW_STREAMS};
9571 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9572 max_output_streams,
9573 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9574
9575 uint8_t avail_leds = 0;
9576 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9577 &avail_leds, 0);
9578
9579 uint8_t focus_dist_calibrated;
9580 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9581 gCamCapability[cameraId]->focus_dist_calibrated);
9582 if (NAME_NOT_FOUND != val) {
9583 focus_dist_calibrated = (uint8_t)val;
9584 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9585 &focus_dist_calibrated, 1);
9586 }
9587
9588 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9589 size = 0;
9590 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9591 MAX_TEST_PATTERN_CNT);
9592 for (size_t i = 0; i < count; i++) {
9593 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9594 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9595 if (NAME_NOT_FOUND != testpatternMode) {
9596 avail_testpattern_modes[size] = testpatternMode;
9597 size++;
9598 }
9599 }
9600 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9601 avail_testpattern_modes,
9602 size);
9603
9604 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9605 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9606 &max_pipeline_depth,
9607 1);
9608
9609 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9610 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9611 &partial_result_count,
9612 1);
9613
9614 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9615 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9616
9617 Vector<uint8_t> available_capabilities;
9618 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9619 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9620 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9621 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9622 if (supportBurst) {
9623 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9624 }
9625 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9626 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9627 if (hfrEnable && available_hfr_configs.array()) {
9628 available_capabilities.add(
9629 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9630 }
9631
9632 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9633 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9634 }
9635 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9636 available_capabilities.array(),
9637 available_capabilities.size());
9638
9639 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9640 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9641 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9642 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9643
9644 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9645 &aeLockAvailable, 1);
9646
9647 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9648 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9649 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9650 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9651
9652 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9653 &awbLockAvailable, 1);
9654
9655 int32_t max_input_streams = 1;
9656 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9657 &max_input_streams,
9658 1);
9659
9660 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9661 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9662 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9663 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9664 HAL_PIXEL_FORMAT_YCbCr_420_888};
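 /* Decoded (informational): IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888} and
  * YCbCr_420_888 -> {BLOB, YCbCr_420_888}. */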
9665 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9666 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9667
9668 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9669 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9670 &max_latency,
9671 1);
9672
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009673#ifndef USE_HAL_3_3
9674 int32_t isp_sensitivity_range[2];
9675 isp_sensitivity_range[0] =
9676 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9677 isp_sensitivity_range[1] =
9678 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9679 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9680 isp_sensitivity_range,
9681 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9682#endif
9683
Thierry Strudel3d639192016-09-09 11:52:26 -07009684 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9685 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9686 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9687 available_hot_pixel_modes,
9688 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9689
9690 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9691 ANDROID_SHADING_MODE_FAST,
9692 ANDROID_SHADING_MODE_HIGH_QUALITY};
9693 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9694 available_shading_modes,
9695 3);
9696
9697 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9698 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9699 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9700 available_lens_shading_map_modes,
9701 2);
9702
9703 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9704 ANDROID_EDGE_MODE_FAST,
9705 ANDROID_EDGE_MODE_HIGH_QUALITY,
9706 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9707 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9708 available_edge_modes,
9709 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9710
9711 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9712 ANDROID_NOISE_REDUCTION_MODE_FAST,
9713 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9714 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9715 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9716 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9717 available_noise_red_modes,
9718 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9719
9720 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9721 ANDROID_TONEMAP_MODE_FAST,
9722 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9723 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9724 available_tonemap_modes,
9725 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9726
9727 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9728 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9729 available_hot_pixel_map_modes,
9730 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9731
9732 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9733 gCamCapability[cameraId]->reference_illuminant1);
9734 if (NAME_NOT_FOUND != val) {
9735 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9736 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9737 }
9738
9739 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9740 gCamCapability[cameraId]->reference_illuminant2);
9741 if (NAME_NOT_FOUND != val) {
9742 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9743 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9744 }
9745
9746 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9747 (void *)gCamCapability[cameraId]->forward_matrix1,
9748 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9749
9750 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9751 (void *)gCamCapability[cameraId]->forward_matrix2,
9752 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9753
9754 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9755 (void *)gCamCapability[cameraId]->color_transform1,
9756 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9757
9758 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9759 (void *)gCamCapability[cameraId]->color_transform2,
9760 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9761
9762 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9763 (void *)gCamCapability[cameraId]->calibration_transform1,
9764 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9765
9766 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9767 (void *)gCamCapability[cameraId]->calibration_transform2,
9768 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9769
9770 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9771 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9772 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9773 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9774 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9775 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9776 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9777 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9778 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9779 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9780 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9781 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9782 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9783 ANDROID_JPEG_GPS_COORDINATES,
9784 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9785 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9786 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9787 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9788 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9789 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9790 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9791 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9792 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9793 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009794#ifndef USE_HAL_3_3
9795 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9796#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009797 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009798 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009799 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9800 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009801 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009802 /* DevCamDebug metadata request_keys_basic */
9803 DEVCAMDEBUG_META_ENABLE,
9804 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009805 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9806 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009807 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009808
9809 size_t request_keys_cnt =
9810 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9811 Vector<int32_t> available_request_keys;
9812 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9813 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9814 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9815 }
9816
9817 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9818 available_request_keys.array(), available_request_keys.size());
9819
9820 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9821 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9822 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9823 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9824 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9825 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9826 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9827 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9828 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9829 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9830 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9831 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9832 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9833 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9834 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9835 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9836 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009837 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009838 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9839 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9840 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009841 ANDROID_STATISTICS_FACE_SCORES,
9842#ifndef USE_HAL_3_3
9843 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9844#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009845 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009846 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009847 // DevCamDebug metadata result_keys_basic
9848 DEVCAMDEBUG_META_ENABLE,
9849 // DevCamDebug metadata result_keys AF
9850 DEVCAMDEBUG_AF_LENS_POSITION,
9851 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9852 DEVCAMDEBUG_AF_TOF_DISTANCE,
9853 DEVCAMDEBUG_AF_LUMA,
9854 DEVCAMDEBUG_AF_HAF_STATE,
9855 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9856 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9857 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9858 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9859 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9860 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9861 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9862 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9863 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9864 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9865 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9866 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9867 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9868 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9869 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9870 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9871 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9872 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9873 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9874 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9875 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9876 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9877 // DevCamDebug metadata result_keys AEC
9878 DEVCAMDEBUG_AEC_TARGET_LUMA,
9879 DEVCAMDEBUG_AEC_COMP_LUMA,
9880 DEVCAMDEBUG_AEC_AVG_LUMA,
9881 DEVCAMDEBUG_AEC_CUR_LUMA,
9882 DEVCAMDEBUG_AEC_LINECOUNT,
9883 DEVCAMDEBUG_AEC_REAL_GAIN,
9884 DEVCAMDEBUG_AEC_EXP_INDEX,
9885 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009886 // DevCamDebug metadata result_keys zzHDR
9887 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9888 DEVCAMDEBUG_AEC_L_LINECOUNT,
9889 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9890 DEVCAMDEBUG_AEC_S_LINECOUNT,
9891 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9892 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9893 // DevCamDebug metadata result_keys ADRC
9894 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9895 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9896 DEVCAMDEBUG_AEC_GTM_RATIO,
9897 DEVCAMDEBUG_AEC_LTM_RATIO,
9898 DEVCAMDEBUG_AEC_LA_RATIO,
9899 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009900 // DevCamDebug metadata result_keys AWB
9901 DEVCAMDEBUG_AWB_R_GAIN,
9902 DEVCAMDEBUG_AWB_G_GAIN,
9903 DEVCAMDEBUG_AWB_B_GAIN,
9904 DEVCAMDEBUG_AWB_CCT,
9905 DEVCAMDEBUG_AWB_DECISION,
9906 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009907 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9908 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9909 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009910 };
9911
Thierry Strudel3d639192016-09-09 11:52:26 -07009912 size_t result_keys_cnt =
9913 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9914
9915 Vector<int32_t> available_result_keys;
9916 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9917 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9918 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9919 }
9920 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9921 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9922 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9923 }
9924 if (supportedFaceDetectMode == 1) {
9925 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9926 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9927 } else if ((supportedFaceDetectMode == 2) ||
9928 (supportedFaceDetectMode == 3)) {
9929 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9930 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9931 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009932#ifndef USE_HAL_3_3
9933 if (hasBlackRegions) {
9934 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9935 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9936 }
9937#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009938 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9939 available_result_keys.array(), available_result_keys.size());
9940
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009941 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009942 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9943 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9944 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9945 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9946 ANDROID_SCALER_CROPPING_TYPE,
9947 ANDROID_SYNC_MAX_LATENCY,
9948 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9949 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9950 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9951 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9952 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9953 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9954 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9955 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9956 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9957 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9958 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9959 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9960 ANDROID_LENS_FACING,
9961 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9962 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9963 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9964 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9965 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9966 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9967 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9968 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9969 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9970 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9971 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9972 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9973 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9974 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9975 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9976 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9977 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9978 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9979 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9980 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009981 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009982 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9983 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9984 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9985 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9986 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9987 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9988 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9989 ANDROID_CONTROL_AVAILABLE_MODES,
9990 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9991 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9992 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9993 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009994 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9995#ifndef USE_HAL_3_3
9996 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9997 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9998#endif
9999 };
10000
10001 Vector<int32_t> available_characteristics_keys;
10002 available_characteristics_keys.appendArray(characteristics_keys_basic,
10003 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10004#ifndef USE_HAL_3_3
10005 if (hasBlackRegions) {
10006 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10007 }
10008#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010009
10010 if (0 <= indexPD) {
10011 int32_t depthKeys[] = {
10012 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10013 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10014 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10015 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10016 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10017 };
10018 available_characteristics_keys.appendArray(depthKeys,
10019 sizeof(depthKeys) / sizeof(depthKeys[0]));
10020 }
10021
Thierry Strudel3d639192016-09-09 11:52:26 -070010022 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010023 available_characteristics_keys.array(),
10024 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010025
10026 /*available stall durations depend on the hw + sw and will be different for different devices */
10027 /*have to add for raw after implementation*/
10028 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10029 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10030
10031 Vector<int64_t> available_stall_durations;
10032 for (uint32_t j = 0; j < stall_formats_count; j++) {
10033 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10034 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10035 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10036 available_stall_durations.add(stall_formats[j]);
10037 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10038 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10039 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10040 }
10041 } else {
10042 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10043 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10044 available_stall_durations.add(stall_formats[j]);
10045 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10046 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10047 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10048 }
10049 }
10050 }
10051 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10052 available_stall_durations.array(),
10053 available_stall_durations.size());
10054
10055 //QCAMERA3_OPAQUE_RAW
10056 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10057 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
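 /* The sensor white level encodes the bit depth and selects the RAW format below:
  * 255 -> 8bpp, 1023 -> 10bpp, 4095 -> 12bpp (per the MAX_VALUE_* macros). */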
10058 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10059 case LEGACY_RAW:
10060 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10061 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10062 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10063 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10064 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10065 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10066 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10067 break;
10068 case MIPI_RAW:
10069 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10070 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10071 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10072 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10073 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10074 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10075 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10076 break;
10077 default:
10078 LOGE("unknown opaque_raw_format %d",
10079 gCamCapability[cameraId]->opaque_raw_fmt);
10080 break;
10081 }
10082 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10083
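 /* QCAMERA3_OPAQUE_RAW_STRIDES is a flat list of (width, height, stride) triples,
  * one per supported opaque RAW dimension. */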
10084 Vector<int32_t> strides;
10085 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10086 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10087 cam_stream_buf_plane_info_t buf_planes;
10088 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10089 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10090 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10091 &gCamCapability[cameraId]->padding_info, &buf_planes);
10092 strides.add(buf_planes.plane_info.mp[0].stride);
10093 }
10094 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10095 strides.size());
10096
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010097 //TBD: remove the following line once backend advertises zzHDR in feature mask
10098 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010099 //Video HDR default
10100 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10101 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010102 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010103 int32_t vhdr_mode[] = {
10104 QCAMERA3_VIDEO_HDR_MODE_OFF,
10105 QCAMERA3_VIDEO_HDR_MODE_ON};
10106
10107 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10108 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10109 vhdr_mode, vhdr_mode_count);
10110 }
10111
Thierry Strudel3d639192016-09-09 11:52:26 -070010112 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10113 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10114 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10115
10116 uint8_t isMonoOnly =
10117 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10118 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10119 &isMonoOnly, 1);
10120
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010121#ifndef USE_HAL_3_3
10122 Vector<int32_t> opaque_size;
10123 for (size_t j = 0; j < scalar_formats_count; j++) {
10124 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10125 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10126 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10127 cam_stream_buf_plane_info_t buf_planes;
10128
10129 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10130 &gCamCapability[cameraId]->padding_info, &buf_planes);
10131
10132 if (rc == 0) {
10133 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10134 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10135 opaque_size.add(buf_planes.plane_info.frame_len);
10136 } else {
10137 LOGE("raw frame calculation failed!");
10138 }
10139 }
10140 }
10141 }
10142
10143 if ((opaque_size.size() > 0) &&
10144 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10145 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10146 else
10147 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using a rough estimation (2 bytes/pixel)");
10148#endif
10149
Thierry Strudel04e026f2016-10-10 11:27:36 -070010150 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10151 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10152 size = 0;
10153 count = CAM_IR_MODE_MAX;
10154 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10155 for (size_t i = 0; i < count; i++) {
10156 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10157 gCamCapability[cameraId]->supported_ir_modes[i]);
10158 if (NAME_NOT_FOUND != val) {
10159 avail_ir_modes[size] = (int32_t)val;
10160 size++;
10161 }
10162 }
10163 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10164 avail_ir_modes, size);
10165 }
10166
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010167 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10168 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10169 size = 0;
10170 count = CAM_AEC_CONVERGENCE_MAX;
10171 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10172 for (size_t i = 0; i < count; i++) {
10173 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10174 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10175 if (NAME_NOT_FOUND != val) {
10176 available_instant_aec_modes[size] = (int32_t)val;
10177 size++;
10178 }
10179 }
10180 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10181 available_instant_aec_modes, size);
10182 }
10183
Thierry Strudel54dc9782017-02-15 12:12:10 -080010184 int32_t sharpness_range[] = {
10185 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10186 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10187 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10188
10189 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10190 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10191 size = 0;
10192 count = CAM_BINNING_CORRECTION_MODE_MAX;
10193 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10194 for (size_t i = 0; i < count; i++) {
10195 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10196 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10197 gCamCapability[cameraId]->supported_binning_modes[i]);
10198 if (NAME_NOT_FOUND != val) {
10199 avail_binning_modes[size] = (int32_t)val;
10200 size++;
10201 }
10202 }
10203 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10204 avail_binning_modes, size);
10205 }
10206
10207 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10208 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10209 size = 0;
10210 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10211 for (size_t i = 0; i < count; i++) {
10212 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10213 gCamCapability[cameraId]->supported_aec_modes[i]);
10214 if (NAME_NOT_FOUND != val)
10215 available_aec_modes[size++] = val;
10216 }
10217 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10218 available_aec_modes, size);
10219 }
10220
10221 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10222 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10223 size = 0;
10224 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10225 for (size_t i = 0; i < count; i++) {
10226 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10227 gCamCapability[cameraId]->supported_iso_modes[i]);
10228 if (NAME_NOT_FOUND != val)
10229 available_iso_modes[size++] = val;
10230 }
10231 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10232 available_iso_modes, size);
10233 }
10234
10235 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10236 for (size_t i = 0; i < count; i++)
10237 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10238 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10239 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10240
10241 int32_t available_saturation_range[4];
10242 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10243 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10244 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10245 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10246 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10247 available_saturation_range, 4);
10248
10249 uint8_t is_hdr_values[2];
10250 is_hdr_values[0] = 0;
10251 is_hdr_values[1] = 1;
10252 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10253 is_hdr_values, 2);
10254
10255 float is_hdr_confidence_range[2];
10256 is_hdr_confidence_range[0] = 0.0;
10257 is_hdr_confidence_range[1] = 1.0;
10258 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10259 is_hdr_confidence_range, 2);
10260
Emilian Peev0a972ef2017-03-16 10:25:53 +000010261 size_t eepromLength = strnlen(
10262 reinterpret_cast<const char *>(
10263 gCamCapability[cameraId]->eeprom_version_info),
10264 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10265 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010266 char easelInfo[] = ",E:N";
10267 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10268 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10269 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010270 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10271 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010272 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010273 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10274 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10275 }
10276
Thierry Strudel3d639192016-09-09 11:52:26 -070010277 gStaticMetadata[cameraId] = staticInfo.release();
10278 return rc;
10279}
10280
10281/*===========================================================================
10282 * FUNCTION : makeTable
10283 *
10284 * DESCRIPTION: make a table of sizes
10285 *
10286 * PARAMETERS :
10287 *
10288 *
10289 *==========================================================================*/
10290void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10291 size_t max_size, int32_t *sizeTable)
10292{
10293 size_t j = 0;
10294 if (size > max_size) {
10295 size = max_size;
10296 }
10297 for (size_t i = 0; i < size; i++) {
10298 sizeTable[j] = dimTable[i].width;
10299 sizeTable[j+1] = dimTable[i].height;
10300 j+=2;
10301 }
10302}
10303
10304/*===========================================================================
10305 * FUNCTION : makeFPSTable
10306 *
10307 * DESCRIPTION: make a table of fps ranges
10308 *
10309 * PARAMETERS :
10310 *
10311 *==========================================================================*/
10312void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10313 size_t max_size, int32_t *fpsRangesTable)
10314{
10315 size_t j = 0;
10316 if (size > max_size) {
10317 size = max_size;
10318 }
10319 for (size_t i = 0; i < size; i++) {
10320 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10321 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10322 j+=2;
10323 }
10324}
10325
10326/*===========================================================================
10327 * FUNCTION : makeOverridesList
10328 *
10329 * DESCRIPTION: make a list of scene mode overrides
10330 *
10331 * PARAMETERS :
10332 *
10333 *
10334 *==========================================================================*/
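// Note (informational): the output is a flat list of (aeMode, awbMode, afMode) byte triples,
// one per framework-visible scene mode, matching the ANDROID_CONTROL_SCENE_MODE_OVERRIDES layout.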
10335void QCamera3HardwareInterface::makeOverridesList(
10336 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10337 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10338{
10339 /*The daemon will give a list of overrides for all scene modes.
10340 However we should send the framework only the overrides for the scene modes
10341 it actually supports*/
10342 size_t j = 0;
10343 if (size > max_size) {
10344 size = max_size;
10345 }
10346 size_t focus_count = CAM_FOCUS_MODE_MAX;
10347 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10348 focus_count);
10349 for (size_t i = 0; i < size; i++) {
10350 bool supt = false;
10351 size_t index = supported_indexes[i];
10352 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10353 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10354 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10355 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10356 overridesTable[index].awb_mode);
10357 if (NAME_NOT_FOUND != val) {
10358 overridesList[j+1] = (uint8_t)val;
10359 }
10360 uint8_t focus_override = overridesTable[index].af_mode;
10361 for (size_t k = 0; k < focus_count; k++) {
10362 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10363 supt = true;
10364 break;
10365 }
10366 }
10367 if (supt) {
10368 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10369 focus_override);
10370 if (NAME_NOT_FOUND != val) {
10371 overridesList[j+2] = (uint8_t)val;
10372 }
10373 } else {
10374 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10375 }
10376 j+=3;
10377 }
10378}
10379
10380/*===========================================================================
10381 * FUNCTION : filterJpegSizes
10382 *
10383 * DESCRIPTION: Returns the supported JPEG sizes, limited to sizes at least as large as
10384 * the active array size divided by the downscale factor
10385 *
10386 * PARAMETERS :
10387 *
10388 * RETURN : length of jpegSizes array
10389 *==========================================================================*/
10390
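// Illustrative example (hypothetical numbers): with a 4032x3024 active array and
// downscale_factor == 2, only processed sizes of at least 2016x1512 are kept as JPEG sizes.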
10391size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10392 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10393 uint8_t downscale_factor)
10394{
10395 if (0 == downscale_factor) {
10396 downscale_factor = 1;
10397 }
10398
10399 int32_t min_width = active_array_size.width / downscale_factor;
10400 int32_t min_height = active_array_size.height / downscale_factor;
10401 size_t jpegSizesCnt = 0;
10402 if (processedSizesCnt > maxCount) {
10403 processedSizesCnt = maxCount;
10404 }
10405 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10406 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10407 jpegSizes[jpegSizesCnt] = processedSizes[i];
10408 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10409 jpegSizesCnt += 2;
10410 }
10411 }
10412 return jpegSizesCnt;
10413}
10414
10415/*===========================================================================
10416 * FUNCTION : computeNoiseModelEntryS
10417 *
10418 * DESCRIPTION: function to map a given sensitivity to the S noise
10419 * model parameters in the DNG noise model.
10420 *
10421 * PARAMETERS : sens : the sensor sensitivity
10422 *
10423 ** RETURN : S (sensor amplification) noise
10424 *
10425 *==========================================================================*/
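// Background (informational): the DNG/Android noise profile models per-pixel noise as
// N(x) = sqrt(S*x + O). S captures signal-dependent (shot) noise scaled by analog gain,
// hence the linear dependence on sensitivity via the calibrated gradient_S/offset_S below.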
10426double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10427 double s = gCamCapability[mCameraId]->gradient_S * sens +
10428 gCamCapability[mCameraId]->offset_S;
10429 return ((s < 0.0) ? 0.0 : s);
10430}
10431
10432/*===========================================================================
10433 * FUNCTION : computeNoiseModelEntryO
10434 *
10435 * DESCRIPTION: function to map a given sensitivity to the O noise
10436 * model parameters in the DNG noise model.
10437 *
10438 * PARAMETERS : sens : the sensor sensitivity
10439 *
10440 ** RETURN : O (sensor readout) noise
10441 *
10442 *==========================================================================*/
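// Background (informational): O is the signal-independent (read/quantization) term of the
// N(x) = sqrt(S*x + O) model; it scales roughly with the square of the total gain, which is
// why the quadratic sensitivity term and the squared digital gain appear below.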
10443double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10444 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10445 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10446 1.0 : (1.0 * sens / max_analog_sens);
10447 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10448 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10449 return ((o < 0.0) ? 0.0 : o);
10450}
10451
10452/*===========================================================================
10453 * FUNCTION : getSensorSensitivity
10454 *
10455 * DESCRIPTION: convert iso_mode to an integer value
10456 *
10457 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10458 *
10459 ** RETURN : sensitivity supported by sensor
10460 *
10461 *==========================================================================*/
10462int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10463{
10464 int32_t sensitivity;
10465
10466 switch (iso_mode) {
10467 case CAM_ISO_MODE_100:
10468 sensitivity = 100;
10469 break;
10470 case CAM_ISO_MODE_200:
10471 sensitivity = 200;
10472 break;
10473 case CAM_ISO_MODE_400:
10474 sensitivity = 400;
10475 break;
10476 case CAM_ISO_MODE_800:
10477 sensitivity = 800;
10478 break;
10479 case CAM_ISO_MODE_1600:
10480 sensitivity = 1600;
10481 break;
10482 default:
10483 sensitivity = -1;
10484 break;
10485 }
10486 return sensitivity;
10487}
10488
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010489int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010490 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010491 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10492 // to connect to Easel.
10493 bool doNotpowerOnEasel =
10494 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10495
10496 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010497 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10498 return OK;
10499 }
10500
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010501 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010502 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010503 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010504 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010505 return res;
10506 }
10507
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010508 EaselManagerClientOpened = true;
10509
10510 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010511 if (res != OK) {
10512 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10513 }
10514
10515 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010516 }
10517
10518 return OK;
10519}
10520
Thierry Strudel3d639192016-09-09 11:52:26 -070010521/*===========================================================================
10522 * FUNCTION : getCamInfo
10523 *
10524 * DESCRIPTION: query camera capabilities
10525 *
10526 * PARAMETERS :
10527 * @cameraId : camera Id
10528 * @info : camera info struct to be filled in with camera capabilities
10529 *
10530 * RETURN : int type of status
10531 * NO_ERROR -- success
10532 * non-zero failure code
10533 *==========================================================================*/
10534int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10535 struct camera_info *info)
10536{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010537 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010538 int rc = 0;
10539
10540 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010541
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010542 {
10543 Mutex::Autolock l(gHdrPlusClientLock);
10544 rc = initHdrPlusClientLocked();
10545 if (rc != OK) {
10546 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10547 pthread_mutex_unlock(&gCamLock);
10548 return rc;
10549 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010550 }
10551
Thierry Strudel3d639192016-09-09 11:52:26 -070010552 if (NULL == gCamCapability[cameraId]) {
10553 rc = initCapabilities(cameraId);
10554 if (rc < 0) {
10555 pthread_mutex_unlock(&gCamLock);
10556 return rc;
10557 }
10558 }
10559
10560 if (NULL == gStaticMetadata[cameraId]) {
10561 rc = initStaticMetadata(cameraId);
10562 if (rc < 0) {
10563 pthread_mutex_unlock(&gCamLock);
10564 return rc;
10565 }
10566 }
10567
10568 switch(gCamCapability[cameraId]->position) {
10569 case CAM_POSITION_BACK:
10570 case CAM_POSITION_BACK_AUX:
10571 info->facing = CAMERA_FACING_BACK;
10572 break;
10573
10574 case CAM_POSITION_FRONT:
10575 case CAM_POSITION_FRONT_AUX:
10576 info->facing = CAMERA_FACING_FRONT;
10577 break;
10578
10579 default:
10580 LOGE("Unknown position type %d for camera id:%d",
10581 gCamCapability[cameraId]->position, cameraId);
10582 rc = -1;
10583 break;
10584 }
10585
10586
10587 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010588#ifndef USE_HAL_3_3
10589 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10590#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010591 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010592#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010593 info->static_camera_characteristics = gStaticMetadata[cameraId];
10594
10595 //For now assume both cameras can operate independently.
10596 info->conflicting_devices = NULL;
10597 info->conflicting_devices_length = 0;
10598
10599 //resource cost is 100 * MIN(1.0, m/M),
10600 //where m is throughput requirement with maximum stream configuration
10601 //and M is CPP maximum throughput.
10602 float max_fps = 0.0;
10603 for (uint32_t i = 0;
10604 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10605 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10606 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10607 }
10608 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10609 gCamCapability[cameraId]->active_array_size.width *
10610 gCamCapability[cameraId]->active_array_size.height * max_fps /
10611 gCamCapability[cameraId]->max_pixel_bandwidth;
10612 info->resource_cost = 100 * MIN(1.0, ratio);
10613 LOGI("camera %d resource cost is %d", cameraId,
10614 info->resource_cost);
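    /* Worked example (illustrative numbers only): for a 4000x3000 active array,
     * max_fps = 30 and max_pixel_bandwidth = 1.2e9:
     *     m     = MAX_PROCESSED_STREAMS (3) * 4000 * 3000 * 30 = 1.08e9
     *     ratio = 1.08e9 / 1.2e9 = 0.9   ->   resource_cost = 100 * 0.9 = 90
     * Ratios above 1.0 are clamped, so the reported cost never exceeds 100. */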
10615
10616 pthread_mutex_unlock(&gCamLock);
10617 return rc;
10618}
10619
10620/*===========================================================================
10621 * FUNCTION : translateCapabilityToMetadata
10622 *
10623 * DESCRIPTION: translate the capability into camera_metadata_t
10624 *
10625 * PARAMETERS : type of the request
10626 *
10627 *
10628 * RETURN : success: camera_metadata_t*
10629 * failure: NULL
10630 *
10631 *==========================================================================*/
10632camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10633{
10634 if (mDefaultMetadata[type] != NULL) {
10635 return mDefaultMetadata[type];
10636 }
10637 //first time we are handling this request
10638 //fill up the metadata structure using the wrapper class
10639 CameraMetadata settings;
10640 //translate from cam_capability_t to camera_metadata_tag_t
10641 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10642 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10643 int32_t defaultRequestID = 0;
10644 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10645
10646 /* OIS disable */
10647 char ois_prop[PROPERTY_VALUE_MAX];
10648 memset(ois_prop, 0, sizeof(ois_prop));
10649 property_get("persist.camera.ois.disable", ois_prop, "0");
10650 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10651
10652 /* Force video to use OIS */
10653 char videoOisProp[PROPERTY_VALUE_MAX];
10654 memset(videoOisProp, 0, sizeof(videoOisProp));
10655 property_get("persist.camera.ois.video", videoOisProp, "1");
10656 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010657
10658 // Hybrid AE enable/disable
10659 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10660 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10661 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10662 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
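    /* These defaults can be overridden for local experiments via Android system
     * properties (illustrative commands; names taken from the property_get calls above):
     *
     *     adb shell setprop persist.camera.ois.disable 1        # force OIS off
     *     adb shell setprop persist.camera.ois.video 0          # do not force OIS for video
     *     adb shell setprop persist.camera.hybrid_ae.enable 1   # enable hybrid AE
     *
     * The values are read when a template is first built; mDefaultMetadata caches the
     * result afterwards. */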
10663
Thierry Strudel3d639192016-09-09 11:52:26 -070010664 uint8_t controlIntent = 0;
10665 uint8_t focusMode;
10666 uint8_t vsMode;
10667 uint8_t optStabMode;
10668 uint8_t cacMode;
10669 uint8_t edge_mode;
10670 uint8_t noise_red_mode;
10671 uint8_t tonemap_mode;
10672 bool highQualityModeEntryAvailable = FALSE;
10673 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010674 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010675 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10676 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010677 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010678 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010679
Thierry Strudel3d639192016-09-09 11:52:26 -070010680 switch (type) {
10681 case CAMERA3_TEMPLATE_PREVIEW:
10682 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10683 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10684 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10685 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10686 edge_mode = ANDROID_EDGE_MODE_FAST;
10687 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10688 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10689 break;
10690 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10691 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10692 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10693 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10694 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10695 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10696 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10697 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10698 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10699 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10700 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10701 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10702 highQualityModeEntryAvailable = TRUE;
10703 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10704 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10705 fastModeEntryAvailable = TRUE;
10706 }
10707 }
10708 if (highQualityModeEntryAvailable) {
10709 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10710 } else if (fastModeEntryAvailable) {
10711 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10712 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010713 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10714 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10715 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010716 break;
10717 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10718 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10719 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10720 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010721 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10722 edge_mode = ANDROID_EDGE_MODE_FAST;
10723 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10724 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10725 if (forceVideoOis)
10726 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10727 break;
10728 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10729 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10730 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10731 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010732 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10733 edge_mode = ANDROID_EDGE_MODE_FAST;
10734 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10735 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10736 if (forceVideoOis)
10737 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10738 break;
10739 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10740 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10741 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10742 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10743 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10744 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10745 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10746 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10747 break;
10748 case CAMERA3_TEMPLATE_MANUAL:
10749 edge_mode = ANDROID_EDGE_MODE_FAST;
10750 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10751 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10752 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10753 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10754 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10755 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10756 break;
10757 default:
10758 edge_mode = ANDROID_EDGE_MODE_FAST;
10759 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10760 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10761 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10762 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10763 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10764 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10765 break;
10766 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010767    // Set CAC to OFF if the underlying device doesn't support it
10768 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10769 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10770 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010771 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10772 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10773 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10774 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10775 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10776 }
10777 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010778 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010779 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010780
10781 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10782 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10783 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10784 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10785 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10786 || ois_disable)
10787 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10788 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010789 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010790
10791 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10792 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10793
10794 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10795 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10796
10797 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10798 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10799
10800 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10801 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10802
10803 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10804 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10805
10806 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10807 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10808
10809 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10810 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10811
10812 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10813 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10814
10815 /*flash*/
10816 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10817 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10818
10819 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10820 settings.update(ANDROID_FLASH_FIRING_POWER,
10821 &flashFiringLevel, 1);
10822
10823 /* lens */
10824 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10825 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10826
10827 if (gCamCapability[mCameraId]->filter_densities_count) {
10828 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10829 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10830 gCamCapability[mCameraId]->filter_densities_count);
10831 }
10832
10833 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10834 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10835
Thierry Strudel3d639192016-09-09 11:52:26 -070010836 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10837 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10838
10839 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10840 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10841
10842 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10843 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10844
10845 /* face detection (default to OFF) */
10846 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10847 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10848
Thierry Strudel54dc9782017-02-15 12:12:10 -080010849 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10850 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010851
10852 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10853 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10854
10855 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10856 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10857
Thierry Strudel3d639192016-09-09 11:52:26 -070010858
10859 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10860 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10861
10862 /* Exposure time(Update the Min Exposure Time)*/
10863 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10864 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10865
10866 /* frame duration */
10867 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10868 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10869
10870 /* sensitivity */
10871 static const int32_t default_sensitivity = 100;
10872 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010873#ifndef USE_HAL_3_3
10874 static const int32_t default_isp_sensitivity =
10875 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10876 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10877#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010878
10879 /*edge mode*/
10880 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10881
10882 /*noise reduction mode*/
10883 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10884
10885 /*color correction mode*/
10886 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10887 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10888
10889 /*transform matrix mode*/
10890 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10891
10892 int32_t scaler_crop_region[4];
10893 scaler_crop_region[0] = 0;
10894 scaler_crop_region[1] = 0;
10895 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10896 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10897 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10898
10899 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10900 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10901
10902 /*focus distance*/
10903 float focus_distance = 0.0;
10904 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10905
10906 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010907 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010908 float max_range = 0.0;
10909 float max_fixed_fps = 0.0;
10910 int32_t fps_range[2] = {0, 0};
10911 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10912 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010913 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10914 TEMPLATE_MAX_PREVIEW_FPS) {
10915 continue;
10916 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010917 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10918 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10919 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10920 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10921 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10922 if (range > max_range) {
10923 fps_range[0] =
10924 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10925 fps_range[1] =
10926 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10927 max_range = range;
10928 }
10929 } else {
10930 if (range < 0.01 && max_fixed_fps <
10931 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10932 fps_range[0] =
10933 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10934 fps_range[1] =
10935 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10936 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10937 }
10938 }
10939 }
10940 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
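    /* Illustrative example of the selection above (hypothetical fps table, with the
     * 30fps template cap mentioned earlier): given ranges {[15,30], [30,30], [7.5,60]},
     * [7.5,60] is skipped because its max exceeds TEMPLATE_MAX_PREVIEW_FPS; PREVIEW,
     * STILL_CAPTURE and ZSL templates pick the widest remaining span, [15,30], while
     * other (video-style) templates pick the highest fixed range, [30,30]. */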
10941
10942 /*precapture trigger*/
10943 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10944 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10945
10946 /*af trigger*/
10947 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10948 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10949
10950 /* ae & af regions */
10951 int32_t active_region[] = {
10952 gCamCapability[mCameraId]->active_array_size.left,
10953 gCamCapability[mCameraId]->active_array_size.top,
10954 gCamCapability[mCameraId]->active_array_size.left +
10955 gCamCapability[mCameraId]->active_array_size.width,
10956 gCamCapability[mCameraId]->active_array_size.top +
10957 gCamCapability[mCameraId]->active_array_size.height,
10958 0};
10959 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10960 sizeof(active_region) / sizeof(active_region[0]));
10961 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10962 sizeof(active_region) / sizeof(active_region[0]));
10963
10964 /* black level lock */
10965 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10966 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10967
Thierry Strudel3d639192016-09-09 11:52:26 -070010968 //special defaults for manual template
10969 if (type == CAMERA3_TEMPLATE_MANUAL) {
10970 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10971 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10972
10973 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10974 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10975
10976 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10977 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10978
10979 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10980 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10981
10982 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10983 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10984
10985 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10986 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10987 }
10988
10989
10990 /* TNR
10991     * We'll use this location to determine for which templates TNR will be set.
10992     * We will enable TNR if either the Preview or the Video stream requires TNR.
10993     * This is not to be confused with linking on a per-stream basis; that decision
10994     * is still on a per-session basis and will be handled as part of stream configuration.
10995 */
10996 uint8_t tnr_enable = 0;
10997
10998 if (m_bTnrPreview || m_bTnrVideo) {
10999
11000 switch (type) {
11001 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11002 tnr_enable = 1;
11003 break;
11004
11005 default:
11006 tnr_enable = 0;
11007 break;
11008 }
11009
11010 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11011 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11012 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11013
11014 LOGD("TNR:%d with process plate %d for template:%d",
11015 tnr_enable, tnr_process_type, type);
11016 }
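    /* Example: when m_bTnrVideo is set (from its persist.camera.* property at init),
     * a CAMERA3_TEMPLATE_VIDEO_RECORD template gets QCAMERA3_TEMPORAL_DENOISE_ENABLE = 1
     * while all other templates keep it 0; the process plate always comes from
     * getTemporalDenoiseProcessPlate(). */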
11017
11018 //Update Link tags to default
11019 int32_t sync_type = CAM_TYPE_STANDALONE;
11020 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11021
11022 int32_t is_main = 0; //this doesn't matter as app should overwrite
11023 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11024
11025 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11026
11027 /* CDS default */
11028 char prop[PROPERTY_VALUE_MAX];
11029 memset(prop, 0, sizeof(prop));
11030 property_get("persist.camera.CDS", prop, "Auto");
11031 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11032 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11033 if (CAM_CDS_MODE_MAX == cds_mode) {
11034 cds_mode = CAM_CDS_MODE_AUTO;
11035 }
11036
11037    /* Disabling CDS in templates which have TNR enabled */
11038 if (tnr_enable)
11039 cds_mode = CAM_CDS_MODE_OFF;
11040
11041 int32_t mode = cds_mode;
11042 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011043
Thierry Strudel269c81a2016-10-12 12:13:59 -070011044 /* Manual Convergence AEC Speed is disabled by default*/
11045 float default_aec_speed = 0;
11046 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11047
11048 /* Manual Convergence AWB Speed is disabled by default*/
11049 float default_awb_speed = 0;
11050 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11051
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011052 // Set instant AEC to normal convergence by default
11053 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11054 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11055
Shuzhen Wang19463d72016-03-08 11:09:52 -080011056 /* hybrid ae */
11057 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11058
Thierry Strudel3d639192016-09-09 11:52:26 -070011059 mDefaultMetadata[type] = settings.release();
11060
11061 return mDefaultMetadata[type];
11062}
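/* Rough usage sketch (assumed wiring, simplified): the framework reaches this through
 * the HAL3 construct_default_request_settings() entry point, then layers per-request
 * overrides on top of the cached defaults:
 *
 *     const camera_metadata_t *defaults =
 *             hw->translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
 *     CameraMetadata request(clone_camera_metadata(defaults)); // copy before editing
 *     // ... framework/app overrides individual keys, then submits the capture request
 *
 * The returned pointer is owned by the HAL (mDefaultMetadata) and must not be freed
 * by the caller. */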
11063
11064/*===========================================================================
11065 * FUNCTION : setFrameParameters
11066 *
11067 * DESCRIPTION: set parameters per frame as requested in the metadata from
11068 * framework
11069 *
11070 * PARAMETERS :
11071 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011072 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011073 * @blob_request: Whether this request is a blob request or not
11074 *
11075 * RETURN : success: NO_ERROR
11076 * failure:
11077 *==========================================================================*/
11078int QCamera3HardwareInterface::setFrameParameters(
11079 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011080 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011081 int blob_request,
11082 uint32_t snapshotStreamId)
11083{
11084 /*translate from camera_metadata_t type to parm_type_t*/
11085 int rc = 0;
11086 int32_t hal_version = CAM_HAL_V3;
11087
11088 clear_metadata_buffer(mParameters);
11089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11090 LOGE("Failed to set hal version in the parameters");
11091 return BAD_VALUE;
11092 }
11093
11094 /*we need to update the frame number in the parameters*/
11095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11096 request->frame_number)) {
11097 LOGE("Failed to set the frame number in the parameters");
11098 return BAD_VALUE;
11099 }
11100
11101 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011103 LOGE("Failed to set stream type mask in the parameters");
11104 return BAD_VALUE;
11105 }
11106
11107 if (mUpdateDebugLevel) {
11108 uint32_t dummyDebugLevel = 0;
11109         * The value of dummyDebugLevel is irrelevant. On
11110 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11112 dummyDebugLevel)) {
11113 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11114 return BAD_VALUE;
11115 }
11116 mUpdateDebugLevel = false;
11117 }
11118
11119 if(request->settings != NULL){
11120 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11121 if (blob_request)
11122 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11123 }
11124
11125 return rc;
11126}
11127
11128/*===========================================================================
11129 * FUNCTION : setReprocParameters
11130 *
11131 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11132 * return it.
11133 *
11134 * PARAMETERS :
11135 * @request : request that needs to be serviced
11136 *
11137 * RETURN : success: NO_ERROR
11138 * failure:
11139 *==========================================================================*/
11140int32_t QCamera3HardwareInterface::setReprocParameters(
11141 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11142 uint32_t snapshotStreamId)
11143{
11144 /*translate from camera_metadata_t type to parm_type_t*/
11145 int rc = 0;
11146
11147 if (NULL == request->settings){
11148 LOGE("Reprocess settings cannot be NULL");
11149 return BAD_VALUE;
11150 }
11151
11152 if (NULL == reprocParam) {
11153 LOGE("Invalid reprocessing metadata buffer");
11154 return BAD_VALUE;
11155 }
11156 clear_metadata_buffer(reprocParam);
11157
11158 /*we need to update the frame number in the parameters*/
11159 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11160 request->frame_number)) {
11161 LOGE("Failed to set the frame number in the parameters");
11162 return BAD_VALUE;
11163 }
11164
11165 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11166 if (rc < 0) {
11167 LOGE("Failed to translate reproc request");
11168 return rc;
11169 }
11170
11171 CameraMetadata frame_settings;
11172 frame_settings = request->settings;
11173 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11174 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11175 int32_t *crop_count =
11176 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11177 int32_t *crop_data =
11178 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11179 int32_t *roi_map =
11180 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11181 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11182 cam_crop_data_t crop_meta;
11183 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11184 crop_meta.num_of_streams = 1;
11185 crop_meta.crop_info[0].crop.left = crop_data[0];
11186 crop_meta.crop_info[0].crop.top = crop_data[1];
11187 crop_meta.crop_info[0].crop.width = crop_data[2];
11188 crop_meta.crop_info[0].crop.height = crop_data[3];
11189
11190 crop_meta.crop_info[0].roi_map.left =
11191 roi_map[0];
11192 crop_meta.crop_info[0].roi_map.top =
11193 roi_map[1];
11194 crop_meta.crop_info[0].roi_map.width =
11195 roi_map[2];
11196 crop_meta.crop_info[0].roi_map.height =
11197 roi_map[3];
11198
11199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11200 rc = BAD_VALUE;
11201 }
11202 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11203 request->input_buffer->stream,
11204 crop_meta.crop_info[0].crop.left,
11205 crop_meta.crop_info[0].crop.top,
11206 crop_meta.crop_info[0].crop.width,
11207 crop_meta.crop_info[0].crop.height);
11208 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11209 request->input_buffer->stream,
11210 crop_meta.crop_info[0].roi_map.left,
11211 crop_meta.crop_info[0].roi_map.top,
11212 crop_meta.crop_info[0].roi_map.width,
11213 crop_meta.crop_info[0].roi_map.height);
11214 } else {
11215 LOGE("Invalid reprocess crop count %d!", *crop_count);
11216 }
11217 } else {
11218 LOGE("No crop data from matching output stream");
11219 }
11220
11221 /* These settings are not needed for regular requests so handle them specially for
11222 reprocess requests; information needed for EXIF tags */
11223 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11224 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11225 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11226 if (NAME_NOT_FOUND != val) {
11227 uint32_t flashMode = (uint32_t)val;
11228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11229 rc = BAD_VALUE;
11230 }
11231 } else {
11232 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11233 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11234 }
11235 } else {
11236 LOGH("No flash mode in reprocess settings");
11237 }
11238
11239 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11240 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11242 rc = BAD_VALUE;
11243 }
11244 } else {
11245 LOGH("No flash state in reprocess settings");
11246 }
11247
11248 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11249 uint8_t *reprocessFlags =
11250 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11252 *reprocessFlags)) {
11253 rc = BAD_VALUE;
11254 }
11255 }
11256
Thierry Strudel54dc9782017-02-15 12:12:10 -080011257 // Add exif debug data to internal metadata
11258 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11259 mm_jpeg_debug_exif_params_t *debug_params =
11260 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11261 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11262 // AE
11263 if (debug_params->ae_debug_params_valid == TRUE) {
11264 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11265 debug_params->ae_debug_params);
11266 }
11267 // AWB
11268 if (debug_params->awb_debug_params_valid == TRUE) {
11269 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11270 debug_params->awb_debug_params);
11271 }
11272 // AF
11273 if (debug_params->af_debug_params_valid == TRUE) {
11274 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11275 debug_params->af_debug_params);
11276 }
11277 // ASD
11278 if (debug_params->asd_debug_params_valid == TRUE) {
11279 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11280 debug_params->asd_debug_params);
11281 }
11282 // Stats
11283 if (debug_params->stats_debug_params_valid == TRUE) {
11284 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11285 debug_params->stats_debug_params);
11286 }
11287 // BE Stats
11288 if (debug_params->bestats_debug_params_valid == TRUE) {
11289 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11290 debug_params->bestats_debug_params);
11291 }
11292 // BHIST
11293 if (debug_params->bhist_debug_params_valid == TRUE) {
11294 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11295 debug_params->bhist_debug_params);
11296 }
11297 // 3A Tuning
11298 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11299 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11300 debug_params->q3a_tuning_debug_params);
11301 }
11302 }
11303
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011304 // Add metadata which reprocess needs
11305 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11306 cam_reprocess_info_t *repro_info =
11307 (cam_reprocess_info_t *)frame_settings.find
11308 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011309 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011310 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011311 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011312 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011313 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011314 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011315 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011316 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011317 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011318 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011319 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011320 repro_info->pipeline_flip);
11321 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11322 repro_info->af_roi);
11323 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11324 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011325        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11326           CAM_INTF_PARM_ROTATION metadata has already been added in
11327           translateToHalMetadata, and HAL needs to keep this new rotation
11328           metadata. Otherwise, the old rotation info saved in the vendor tag
11329           is used */
11330 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11331 CAM_INTF_PARM_ROTATION, reprocParam) {
11332 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11333 } else {
11334 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011335 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011336 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011337 }
11338
11339    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11340       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11341       roi.width and roi.height are the final JPEG size.
11342       For now, HAL only checks this for reprocess requests */
11343 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11344 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11345 uint8_t *enable =
11346 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11347 if (*enable == TRUE) {
11348 int32_t *crop_data =
11349 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11350 cam_stream_crop_info_t crop_meta;
11351 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11352 crop_meta.stream_id = 0;
11353 crop_meta.crop.left = crop_data[0];
11354 crop_meta.crop.top = crop_data[1];
11355 crop_meta.crop.width = crop_data[2];
11356 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011357 // The JPEG crop roi should match cpp output size
11358 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11359 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11360 crop_meta.roi_map.left = 0;
11361 crop_meta.roi_map.top = 0;
11362 crop_meta.roi_map.width = cpp_crop->crop.width;
11363 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011364 }
11365 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11366 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011367 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011368 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011369 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11370 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011371 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011372 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11373
11374 // Add JPEG scale information
11375 cam_dimension_t scale_dim;
11376 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11377 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11378 int32_t *roi =
11379 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11380 scale_dim.width = roi[2];
11381 scale_dim.height = roi[3];
11382 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11383 scale_dim);
11384 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11385 scale_dim.width, scale_dim.height, mCameraId);
11386 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011387 }
11388 }
11389
11390 return rc;
11391}
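/* Illustrative app-side sketch (hypothetical values) for the reprocess JPEG crop
 * handled above: enable the crop, pass the crop rect as [left, top, width, height]
 * in CPP output coordinates, and optionally a ROI whose width/height become the
 * final JPEG size:
 *
 *     uint8_t enable  = 1;
 *     int32_t rect[4] = {0, 0, 2000, 1500};   // crop rect
 *     int32_t roi[4]  = {0, 0, 4000, 3000};   // scale the cropped region to 4000x3000
 *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
 *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, rect, 4);
 *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, roi, 4);
 */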
11392
11393/*===========================================================================
11394 * FUNCTION : saveRequestSettings
11395 *
11396 * DESCRIPTION: Add any settings that might have changed to the request settings
11397 * and save the settings to be applied on the frame
11398 *
11399 * PARAMETERS :
11400 * @jpegMetadata : the extracted and/or modified jpeg metadata
11401 * @request : request with initial settings
11402 *
11403 * RETURN :
11404 * camera_metadata_t* : pointer to the saved request settings
11405 *==========================================================================*/
11406camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11407 const CameraMetadata &jpegMetadata,
11408 camera3_capture_request_t *request)
11409{
11410 camera_metadata_t *resultMetadata;
11411 CameraMetadata camMetadata;
11412 camMetadata = request->settings;
11413
11414 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11415 int32_t thumbnail_size[2];
11416 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11417 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11418 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11419 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11420 }
11421
11422 if (request->input_buffer != NULL) {
11423 uint8_t reprocessFlags = 1;
11424 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11425 (uint8_t*)&reprocessFlags,
11426 sizeof(reprocessFlags));
11427 }
11428
11429 resultMetadata = camMetadata.release();
11430 return resultMetadata;
11431}
11432
11433/*===========================================================================
11434 * FUNCTION : setHalFpsRange
11435 *
11436 * DESCRIPTION: set FPS range parameter
11437 *
11438 *
11439 * PARAMETERS :
11440 * @settings : Metadata from framework
11441 * @hal_metadata: Metadata buffer
11442 *
11443 *
11444 * RETURN : success: NO_ERROR
11445 * failure:
11446 *==========================================================================*/
11447int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11448 metadata_buffer_t *hal_metadata)
11449{
11450 int32_t rc = NO_ERROR;
11451 cam_fps_range_t fps_range;
11452 fps_range.min_fps = (float)
11453 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11454 fps_range.max_fps = (float)
11455 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11456 fps_range.video_min_fps = fps_range.min_fps;
11457 fps_range.video_max_fps = fps_range.max_fps;
11458
11459 LOGD("aeTargetFpsRange fps: [%f %f]",
11460 fps_range.min_fps, fps_range.max_fps);
11461 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11462 * follows:
11463 * ---------------------------------------------------------------|
11464 * Video stream is absent in configure_streams |
11465 * (Camcorder preview before the first video record |
11466 * ---------------------------------------------------------------|
11467 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11468 * | | | vid_min/max_fps|
11469 * ---------------------------------------------------------------|
11470 * NO | [ 30, 240] | 240 | [240, 240] |
11471 * |-------------|-------------|----------------|
11472 * | [240, 240] | 240 | [240, 240] |
11473 * ---------------------------------------------------------------|
11474 * Video stream is present in configure_streams |
11475 * ---------------------------------------------------------------|
11476 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11477 * | | | vid_min/max_fps|
11478 * ---------------------------------------------------------------|
11479 * NO | [ 30, 240] | 240 | [240, 240] |
11480 * (camcorder prev |-------------|-------------|----------------|
11481 * after video rec | [240, 240] | 240 | [240, 240] |
11482 * is stopped) | | | |
11483 * ---------------------------------------------------------------|
11484 * YES | [ 30, 240] | 240 | [240, 240] |
11485 * |-------------|-------------|----------------|
11486 * | [240, 240] | 240 | [240, 240] |
11487 * ---------------------------------------------------------------|
11488 * When Video stream is absent in configure_streams,
11489 * preview fps = sensor_fps / batchsize
11490 * Eg: for 240fps at batchSize 4, preview = 60fps
11491 * for 120fps at batchSize 4, preview = 30fps
11492 *
11493 * When video stream is present in configure_streams, preview fps is as per
11494 * the ratio of preview buffers to video buffers requested in process
11495 * capture request
11496 */
11497 mBatchSize = 0;
11498 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11499 fps_range.min_fps = fps_range.video_max_fps;
11500 fps_range.video_min_fps = fps_range.video_max_fps;
11501 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11502 fps_range.max_fps);
11503 if (NAME_NOT_FOUND != val) {
11504 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11506 return BAD_VALUE;
11507 }
11508
11509 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11510 /* If batchmode is currently in progress and the fps changes,
11511 * set the flag to restart the sensor */
11512 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11513 (mHFRVideoFps != fps_range.max_fps)) {
11514 mNeedSensorRestart = true;
11515 }
11516 mHFRVideoFps = fps_range.max_fps;
11517 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11518 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11519 mBatchSize = MAX_HFR_BATCH_SIZE;
11520 }
11521 }
11522 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11523
11524 }
11525 } else {
11526 /* HFR mode is session param in backend/ISP. This should be reset when
11527 * in non-HFR mode */
11528 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11530 return BAD_VALUE;
11531 }
11532 }
11533 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11534 return BAD_VALUE;
11535 }
11536 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11537 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11538 return rc;
11539}
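/* Worked example for the constrained high-speed path (illustrative; assumes
 * PREVIEW_FPS_FOR_HFR is 30): an aeTargetFpsRange of [240, 240] maps to the 240fps
 * entry of HFR_MODE_MAP, pins the sensor fps range to [240, 240], and yields
 * mBatchSize = 240 / 30 = 8, capped at MAX_HFR_BATCH_SIZE. Preview then runs at
 * sensor_fps / batchSize when no video stream is configured, per the table above. */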
11540
11541/*===========================================================================
11542 * FUNCTION : translateToHalMetadata
11543 *
11544 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11545 *
11546 *
11547 * PARAMETERS :
11548 * @request : request sent from framework
11549 *
11550 *
11551 * RETURN : success: NO_ERROR
11552 * failure:
11553 *==========================================================================*/
11554int QCamera3HardwareInterface::translateToHalMetadata
11555 (const camera3_capture_request_t *request,
11556 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011557 uint32_t snapshotStreamId) {
11558 if (request == nullptr || hal_metadata == nullptr) {
11559 return BAD_VALUE;
11560 }
11561
11562 int64_t minFrameDuration = getMinFrameDuration(request);
11563
11564 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11565 minFrameDuration);
11566}
11567
11568int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11569 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11570 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11571
Thierry Strudel3d639192016-09-09 11:52:26 -070011572 int rc = 0;
11573 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011574 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011575
11576 /* Do not change the order of the following list unless you know what you are
11577 * doing.
11578 * The order is laid out in such a way that parameters in the front of the table
11579 * may be used to override the parameters later in the table. Examples are:
11580 * 1. META_MODE should precede AEC/AWB/AF MODE
11581     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11582 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11583     * 4. Any mode should precede its corresponding settings
11584 */
11585 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11586 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11588 rc = BAD_VALUE;
11589 }
11590 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11591 if (rc != NO_ERROR) {
11592 LOGE("extractSceneMode failed");
11593 }
11594 }
11595
11596 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11597 uint8_t fwk_aeMode =
11598 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11599 uint8_t aeMode;
11600 int32_t redeye;
11601
11602 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11603 aeMode = CAM_AE_MODE_OFF;
11604 } else {
11605 aeMode = CAM_AE_MODE_ON;
11606 }
11607 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11608 redeye = 1;
11609 } else {
11610 redeye = 0;
11611 }
11612
11613 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11614 fwk_aeMode);
11615 if (NAME_NOT_FOUND != val) {
11616 int32_t flashMode = (int32_t)val;
11617 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11618 }
11619
11620 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11621 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11622 rc = BAD_VALUE;
11623 }
11624 }
11625
11626 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11627 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11628 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11629 fwk_whiteLevel);
11630 if (NAME_NOT_FOUND != val) {
11631 uint8_t whiteLevel = (uint8_t)val;
11632 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11633 rc = BAD_VALUE;
11634 }
11635 }
11636 }
11637
11638 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11639 uint8_t fwk_cacMode =
11640 frame_settings.find(
11641 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11642 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11643 fwk_cacMode);
11644 if (NAME_NOT_FOUND != val) {
11645 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11646 bool entryAvailable = FALSE;
11647 // Check whether Frameworks set CAC mode is supported in device or not
11648 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11649 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11650 entryAvailable = TRUE;
11651 break;
11652 }
11653 }
11654 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11655             // If the entry is not found, set the device-supported mode instead of the framework's mode, i.e.,
11656 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11657 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11658 if (entryAvailable == FALSE) {
11659 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11660 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11661 } else {
11662 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11663                     // High is not supported, so set FAST; the spec says the underlying
11664 // device implementation can be the same for both modes.
11665 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11666 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11667 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11668 // in order to avoid the fps drop due to high quality
11669 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11670 } else {
11671 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11672 }
11673 }
11674 }
11675 LOGD("Final cacMode is %d", cacMode);
11676 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11677 rc = BAD_VALUE;
11678 }
11679 } else {
11680 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11681 }
11682 }
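    /* Example of the fallback above: if the framework requests
     * ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY but the capability table
     * only lists CAM_COLOR_CORRECTION_ABERRATION_FAST, the request is downgraded to
     * FAST; if the table is empty, any requested mode degrades to OFF. */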
11683
Thierry Strudel2896d122017-02-23 19:18:03 -080011684 char af_value[PROPERTY_VALUE_MAX];
11685 property_get("persist.camera.af.infinity", af_value, "0");
11686
Jason Lee84ae9972017-02-24 13:24:24 -080011687 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011688 if (atoi(af_value) == 0) {
11689 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011690 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011691 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11692 fwk_focusMode);
11693 if (NAME_NOT_FOUND != val) {
11694 uint8_t focusMode = (uint8_t)val;
11695 LOGD("set focus mode %d", focusMode);
11696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11697 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11698 rc = BAD_VALUE;
11699 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011700 }
11701 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011702 } else {
11703 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11704 LOGE("Focus forced to infinity %d", focusMode);
11705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11706 rc = BAD_VALUE;
11707 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011708 }
11709
Jason Lee84ae9972017-02-24 13:24:24 -080011710 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11711 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011712 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11713 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11714 focalDistance)) {
11715 rc = BAD_VALUE;
11716 }
11717 }
11718
11719 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11720 uint8_t fwk_antibandingMode =
11721 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11722 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11723 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11724 if (NAME_NOT_FOUND != val) {
11725 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011726 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11727 if (m60HzZone) {
11728 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11729 } else {
11730 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11731 }
11732 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11734 hal_antibandingMode)) {
11735 rc = BAD_VALUE;
11736 }
11737 }
11738 }
11739
11740 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11741 int32_t expCompensation = frame_settings.find(
11742 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11743 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11744 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11745 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11746 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011747 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11749 expCompensation)) {
11750 rc = BAD_VALUE;
11751 }
11752 }
11753
11754 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11755 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11757 rc = BAD_VALUE;
11758 }
11759 }
11760 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11761 rc = setHalFpsRange(frame_settings, hal_metadata);
11762 if (rc != NO_ERROR) {
11763 LOGE("setHalFpsRange failed");
11764 }
11765 }
11766
11767 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11768 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11770 rc = BAD_VALUE;
11771 }
11772 }
11773
11774 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11775 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11776 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11777 fwk_effectMode);
11778 if (NAME_NOT_FOUND != val) {
11779 uint8_t effectMode = (uint8_t)val;
11780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11781 rc = BAD_VALUE;
11782 }
11783 }
11784 }
11785
11786 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11787 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11789 colorCorrectMode)) {
11790 rc = BAD_VALUE;
11791 }
11792 }
11793
11794 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11795 cam_color_correct_gains_t colorCorrectGains;
11796 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11797 colorCorrectGains.gains[i] =
11798 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11799 }
11800 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11801 colorCorrectGains)) {
11802 rc = BAD_VALUE;
11803 }
11804 }
11805
11806 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11807 cam_color_correct_matrix_t colorCorrectTransform;
11808 cam_rational_type_t transform_elem;
11809 size_t num = 0;
11810 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11811 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11812 transform_elem.numerator =
11813 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11814 transform_elem.denominator =
11815 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11816 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11817 num++;
11818 }
11819 }
11820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11821 colorCorrectTransform)) {
11822 rc = BAD_VALUE;
11823 }
11824 }
11825
11826 cam_trigger_t aecTrigger;
11827 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11828 aecTrigger.trigger_id = -1;
11829 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11830 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11831 aecTrigger.trigger =
11832 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11833 aecTrigger.trigger_id =
11834 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11835 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11836 aecTrigger)) {
11837 rc = BAD_VALUE;
11838 }
11839 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11840 aecTrigger.trigger, aecTrigger.trigger_id);
11841 }
11842
11843 /*af_trigger must come with a trigger id*/
11844 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11845 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11846 cam_trigger_t af_trigger;
11847 af_trigger.trigger =
11848 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11849 af_trigger.trigger_id =
11850 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11852 rc = BAD_VALUE;
11853 }
11854 LOGD("AfTrigger: %d AfTriggerID: %d",
11855 af_trigger.trigger, af_trigger.trigger_id);
11856 }
11857
11858 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11859 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11861 rc = BAD_VALUE;
11862 }
11863 }
11864 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11865 cam_edge_application_t edge_application;
11866 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011867
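        // Edge mode OFF forces sharpness to 0; otherwise start from the platform
        // default sharpness and honor the QCAMERA3_SHARPNESS_STRENGTH vendor tag
        // only when it lies within the advertised sharpness_ctrl range.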
Thierry Strudel3d639192016-09-09 11:52:26 -070011868 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11869 edge_application.sharpness = 0;
11870 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011871 edge_application.sharpness =
11872 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11873 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11874 int32_t sharpness =
11875 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11876 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11877 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11878 LOGD("Setting edge mode sharpness %d", sharpness);
11879 edge_application.sharpness = sharpness;
11880 }
11881 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011882 }
11883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11884 rc = BAD_VALUE;
11885 }
11886 }
11887
11888 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11889 int32_t respectFlashMode = 1;
11890 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11891 uint8_t fwk_aeMode =
11892 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11893 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11894 respectFlashMode = 0;
11895 LOGH("AE Mode controls flash, ignore android.flash.mode");
11896 }
11897 }
11898 if (respectFlashMode) {
11899 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11900 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11901 LOGH("flash mode after mapping %d", val);
11902 // To check: CAM_INTF_META_FLASH_MODE usage
11903 if (NAME_NOT_FOUND != val) {
11904 uint8_t flashMode = (uint8_t)val;
11905 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11906 rc = BAD_VALUE;
11907 }
11908 }
11909 }
11910 }
11911
11912 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11913 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11915 rc = BAD_VALUE;
11916 }
11917 }
11918
11919 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11920 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11922 flashFiringTime)) {
11923 rc = BAD_VALUE;
11924 }
11925 }
11926
11927 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11928 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11930 hotPixelMode)) {
11931 rc = BAD_VALUE;
11932 }
11933 }
11934
11935 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11936 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11938 lensAperture)) {
11939 rc = BAD_VALUE;
11940 }
11941 }
11942
11943 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11944 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11945 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11946 filterDensity)) {
11947 rc = BAD_VALUE;
11948 }
11949 }
11950
11951 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11952 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11954 focalLength)) {
11955 rc = BAD_VALUE;
11956 }
11957 }
11958
11959 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11960 uint8_t optStabMode =
11961 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11962 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11963 optStabMode)) {
11964 rc = BAD_VALUE;
11965 }
11966 }
11967
11968 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11969 uint8_t videoStabMode =
11970 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11971 LOGD("videoStabMode from APP = %d", videoStabMode);
11972 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11973 videoStabMode)) {
11974 rc = BAD_VALUE;
11975 }
11976 }
11977
11978
11979 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11980 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11982 noiseRedMode)) {
11983 rc = BAD_VALUE;
11984 }
11985 }
11986
11987 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11988 float reprocessEffectiveExposureFactor =
11989 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11991 reprocessEffectiveExposureFactor)) {
11992 rc = BAD_VALUE;
11993 }
11994 }
11995
11996 cam_crop_region_t scalerCropRegion;
11997 bool scalerCropSet = false;
11998 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11999 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12000 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12001 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12002 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12003
12004 // Map coordinate system from active array to sensor output.
12005 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12006 scalerCropRegion.width, scalerCropRegion.height);
12007
12008 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12009 scalerCropRegion)) {
12010 rc = BAD_VALUE;
12011 }
12012 scalerCropSet = true;
12013 }
12014
12015 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12016 int64_t sensorExpTime =
12017 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12018 LOGD("setting sensorExpTime %lld", sensorExpTime);
12019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12020 sensorExpTime)) {
12021 rc = BAD_VALUE;
12022 }
12023 }
12024
12025 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12026 int64_t sensorFrameDuration =
12027 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012028 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12029 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12030 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12031 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12033 sensorFrameDuration)) {
12034 rc = BAD_VALUE;
12035 }
12036 }
12037
12038 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12039 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12040 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12041 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12042 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12043 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12044 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12045 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12046 sensorSensitivity)) {
12047 rc = BAD_VALUE;
12048 }
12049 }
12050
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012051#ifndef USE_HAL_3_3
12052 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12053 int32_t ispSensitivity =
12054 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12055 if (ispSensitivity <
12056 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12057 ispSensitivity =
12058 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12059 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12060 }
12061 if (ispSensitivity >
12062 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12063 ispSensitivity =
12064 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12065 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12066 }
12067 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12068 ispSensitivity)) {
12069 rc = BAD_VALUE;
12070 }
12071 }
12072#endif
12073
Thierry Strudel3d639192016-09-09 11:52:26 -070012074 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12075 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12077 rc = BAD_VALUE;
12078 }
12079 }
12080
12081 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12082 uint8_t fwk_facedetectMode =
12083 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12084
12085 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12086 fwk_facedetectMode);
12087
12088 if (NAME_NOT_FOUND != val) {
12089 uint8_t facedetectMode = (uint8_t)val;
12090 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12091 facedetectMode)) {
12092 rc = BAD_VALUE;
12093 }
12094 }
12095 }
12096
Thierry Strudel54dc9782017-02-15 12:12:10 -080012097 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012098 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012099 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012100 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12101 histogramMode)) {
12102 rc = BAD_VALUE;
12103 }
12104 }
12105
12106 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12107 uint8_t sharpnessMapMode =
12108 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12110 sharpnessMapMode)) {
12111 rc = BAD_VALUE;
12112 }
12113 }
12114
12115 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12116 uint8_t tonemapMode =
12117 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12119 rc = BAD_VALUE;
12120 }
12121 }
12122 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12123 /*All tonemap channels will have the same number of points*/
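    /* Each ANDROID_TONEMAP_CURVE_* entry is a flat float array of interleaved
       (Pin, Pout) pairs, e.g. a two-point identity curve is {0.0, 0.0, 1.0, 1.0},
       so the point count is count/2 and the inner loops below copy one pair
       per point. */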
12124 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12125 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12126 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12127 cam_rgb_tonemap_curves tonemapCurves;
12128 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12129 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12130 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12131 tonemapCurves.tonemap_points_cnt,
12132 CAM_MAX_TONEMAP_CURVE_SIZE);
12133 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12134 }
12135
12136 /* ch0 = G*/
12137 size_t point = 0;
12138 cam_tonemap_curve_t tonemapCurveGreen;
12139 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12140 for (size_t j = 0; j < 2; j++) {
12141 tonemapCurveGreen.tonemap_points[i][j] =
12142 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12143 point++;
12144 }
12145 }
12146 tonemapCurves.curves[0] = tonemapCurveGreen;
12147
12148 /* ch 1 = B */
12149 point = 0;
12150 cam_tonemap_curve_t tonemapCurveBlue;
12151 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12152 for (size_t j = 0; j < 2; j++) {
12153 tonemapCurveBlue.tonemap_points[i][j] =
12154 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12155 point++;
12156 }
12157 }
12158 tonemapCurves.curves[1] = tonemapCurveBlue;
12159
12160 /* ch 2 = R */
12161 point = 0;
12162 cam_tonemap_curve_t tonemapCurveRed;
12163 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12164 for (size_t j = 0; j < 2; j++) {
12165 tonemapCurveRed.tonemap_points[i][j] =
12166 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12167 point++;
12168 }
12169 }
12170 tonemapCurves.curves[2] = tonemapCurveRed;
12171
12172 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12173 tonemapCurves)) {
12174 rc = BAD_VALUE;
12175 }
12176 }
12177
12178 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12179 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12180 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12181 captureIntent)) {
12182 rc = BAD_VALUE;
12183 }
12184 }
12185
12186 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12187 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12189 blackLevelLock)) {
12190 rc = BAD_VALUE;
12191 }
12192 }
12193
12194 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12195 uint8_t lensShadingMapMode =
12196 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12197 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12198 lensShadingMapMode)) {
12199 rc = BAD_VALUE;
12200 }
12201 }
12202
12203 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12204 cam_area_t roi;
12205 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012206 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012207
12208 // Map coordinate system from active array to sensor output.
12209 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12210 roi.rect.height);
12211
12212 if (scalerCropSet) {
12213 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12214 }
12215 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12216 rc = BAD_VALUE;
12217 }
12218 }
12219
12220 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12221 cam_area_t roi;
12222 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012223 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012224
12225 // Map coordinate system from active array to sensor output.
12226 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12227 roi.rect.height);
12228
12229 if (scalerCropSet) {
12230 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12231 }
12232 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12233 rc = BAD_VALUE;
12234 }
12235 }
12236
12237 // CDS for non-HFR non-video mode
12238 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12239 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12240 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12241 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12242 LOGE("Invalid CDS mode %d!", *fwk_cds);
12243 } else {
12244 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12245 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12246 rc = BAD_VALUE;
12247 }
12248 }
12249 }
12250
Thierry Strudel04e026f2016-10-10 11:27:36 -070012251 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012252 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012253 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012254 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12255 }
12256 if (m_bVideoHdrEnabled)
12257 vhdr = CAM_VIDEO_HDR_MODE_ON;
12258
Thierry Strudel54dc9782017-02-15 12:12:10 -080012259 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12260
12261 if(vhdr != curr_hdr_state)
12262 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12263
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012264 rc = setVideoHdrMode(mParameters, vhdr);
12265 if (rc != NO_ERROR) {
12266 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012267 }
12268
12269 //IR
12270 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12271 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12272 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012273 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12274 uint8_t isIRon = 0;
12275
12276 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012277 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12278 LOGE("Invalid IR mode %d!", fwk_ir);
12279 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012280 if(isIRon != curr_ir_state )
12281 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12282
Thierry Strudel04e026f2016-10-10 11:27:36 -070012283 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12284 CAM_INTF_META_IR_MODE, fwk_ir)) {
12285 rc = BAD_VALUE;
12286 }
12287 }
12288 }
12289
Thierry Strudel54dc9782017-02-15 12:12:10 -080012290 //Binning Correction Mode
12291 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12292 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12293 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12294 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12295 || (0 > fwk_binning_correction)) {
12296 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12297 } else {
12298 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12299 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12300 rc = BAD_VALUE;
12301 }
12302 }
12303 }
12304
Thierry Strudel269c81a2016-10-12 12:13:59 -070012305 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12306 float aec_speed;
12307 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12308 LOGD("AEC Speed :%f", aec_speed);
12309 if ( aec_speed < 0 ) {
12310 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12311 } else {
12312 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12313 aec_speed)) {
12314 rc = BAD_VALUE;
12315 }
12316 }
12317 }
12318
12319 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12320 float awb_speed;
12321 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12322 LOGD("AWB Speed :%f", awb_speed);
12323 if ( awb_speed < 0 ) {
12324 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12325 } else {
12326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12327 awb_speed)) {
12328 rc = BAD_VALUE;
12329 }
12330 }
12331 }
12332
Thierry Strudel3d639192016-09-09 11:52:26 -070012333 // TNR
12334 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12335 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12336 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012337 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012338 cam_denoise_param_t tnr;
12339 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12340 tnr.process_plates =
12341 (cam_denoise_process_type_t)frame_settings.find(
12342 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12343 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012344
12345 if(b_TnrRequested != curr_tnr_state)
12346 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12347
Thierry Strudel3d639192016-09-09 11:52:26 -070012348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12349 rc = BAD_VALUE;
12350 }
12351 }
12352
Thierry Strudel54dc9782017-02-15 12:12:10 -080012353 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012354 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012355 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012356 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12357 *exposure_metering_mode)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361
Thierry Strudel3d639192016-09-09 11:52:26 -070012362 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12363 int32_t fwk_testPatternMode =
12364 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12365 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12366 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12367
12368 if (NAME_NOT_FOUND != testPatternMode) {
12369 cam_test_pattern_data_t testPatternData;
12370 memset(&testPatternData, 0, sizeof(testPatternData));
12371 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12372 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12373 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12374 int32_t *fwk_testPatternData =
12375 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12376 testPatternData.r = fwk_testPatternData[0];
12377 testPatternData.b = fwk_testPatternData[3];
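                // Framework test pattern data is ordered [R, Geven, Godd, B];
                // map the even/odd-row greens to the sensor's Gr/Gb channels
                // according to its color filter arrangement below.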
12378 switch (gCamCapability[mCameraId]->color_arrangement) {
12379 case CAM_FILTER_ARRANGEMENT_RGGB:
12380 case CAM_FILTER_ARRANGEMENT_GRBG:
12381 testPatternData.gr = fwk_testPatternData[1];
12382 testPatternData.gb = fwk_testPatternData[2];
12383 break;
12384 case CAM_FILTER_ARRANGEMENT_GBRG:
12385 case CAM_FILTER_ARRANGEMENT_BGGR:
12386 testPatternData.gr = fwk_testPatternData[2];
12387 testPatternData.gb = fwk_testPatternData[1];
12388 break;
12389 default:
12390 LOGE("color arrangement %d is not supported",
12391 gCamCapability[mCameraId]->color_arrangement);
12392 break;
12393 }
12394 }
12395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12396 testPatternData)) {
12397 rc = BAD_VALUE;
12398 }
12399 } else {
12400 LOGE("Invalid framework sensor test pattern mode %d",
12401 fwk_testPatternMode);
12402 }
12403 }
12404
12405 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12406 size_t count = 0;
12407 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12408 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12409 gps_coords.data.d, gps_coords.count, count);
12410 if (gps_coords.count != count) {
12411 rc = BAD_VALUE;
12412 }
12413 }
12414
12415 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12416 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12417 size_t count = 0;
12418 const char *gps_methods_src = (const char *)
12419 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12420 memset(gps_methods, '\0', sizeof(gps_methods));
12421 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12422 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12423 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12424 if (GPS_PROCESSING_METHOD_SIZE != count) {
12425 rc = BAD_VALUE;
12426 }
12427 }
12428
12429 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12430 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12431 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12432 gps_timestamp)) {
12433 rc = BAD_VALUE;
12434 }
12435 }
12436
12437 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12438 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12439 cam_rotation_info_t rotation_info;
12440 if (orientation == 0) {
12441 rotation_info.rotation = ROTATE_0;
12442 } else if (orientation == 90) {
12443 rotation_info.rotation = ROTATE_90;
12444 } else if (orientation == 180) {
12445 rotation_info.rotation = ROTATE_180;
12446 } else if (orientation == 270) {
12447 rotation_info.rotation = ROTATE_270;
12448 }
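            // Assumption: ANDROID_JPEG_ORIENTATION is one of 0/90/180/270 per the
            // API contract; any other value would leave rotation_info.rotation unset.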
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012449 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012450 rotation_info.streamId = snapshotStreamId;
12451 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12453 rc = BAD_VALUE;
12454 }
12455 }
12456
12457 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12458 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12459 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12460 rc = BAD_VALUE;
12461 }
12462 }
12463
12464 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12465 uint32_t thumb_quality = (uint32_t)
12466 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12467 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12468 thumb_quality)) {
12469 rc = BAD_VALUE;
12470 }
12471 }
12472
12473 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12474 cam_dimension_t dim;
12475 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12476 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12478 rc = BAD_VALUE;
12479 }
12480 }
12481
12482 // Internal metadata
12483 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12484 size_t count = 0;
12485 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12486 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12487 privatedata.data.i32, privatedata.count, count);
12488 if (privatedata.count != count) {
12489 rc = BAD_VALUE;
12490 }
12491 }
12492
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012493 // ISO/Exposure Priority
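    // QCAMERA3_SELECT_PRIORITY chooses whether the 64-bit value in
    // QCAMERA3_USE_ISO_EXP_PRIORITY is applied as a manual ISO or a manual
    // exposure time; ZSL is kept enabled only while a priority mode is active.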
12494 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12495 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12496 cam_priority_mode_t mode =
12497 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12498 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12499 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12500 use_iso_exp_pty.previewOnly = FALSE;
12501 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12502 use_iso_exp_pty.value = *ptr;
12503
12504 if(CAM_ISO_PRIORITY == mode) {
12505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12506 use_iso_exp_pty)) {
12507 rc = BAD_VALUE;
12508 }
12509 }
12510 else {
12511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12512 use_iso_exp_pty)) {
12513 rc = BAD_VALUE;
12514 }
12515 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012516
12517 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12518 rc = BAD_VALUE;
12519 }
12520 }
12521 } else {
12522 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12523 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012524 }
12525 }
12526
12527 // Saturation
12528 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12529 int32_t* use_saturation =
12530 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12532 rc = BAD_VALUE;
12533 }
12534 }
12535
Thierry Strudel3d639192016-09-09 11:52:26 -070012536 // EV step
12537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12538 gCamCapability[mCameraId]->exp_compensation_step)) {
12539 rc = BAD_VALUE;
12540 }
12541
12542 // CDS info
12543 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12544 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12545 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12546
12547 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12548 CAM_INTF_META_CDS_DATA, *cdsData)) {
12549 rc = BAD_VALUE;
12550 }
12551 }
12552
Shuzhen Wang19463d72016-03-08 11:09:52 -080012553 // Hybrid AE
12554 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12555 uint8_t *hybrid_ae = (uint8_t *)
12556 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12557
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12559 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563
Shuzhen Wang14415f52016-11-16 18:26:18 -080012564 // Histogram
12565 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12566 uint8_t histogramMode =
12567 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12569 histogramMode)) {
12570 rc = BAD_VALUE;
12571 }
12572 }
12573
12574 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12575 int32_t histogramBins =
12576 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12578 histogramBins)) {
12579 rc = BAD_VALUE;
12580 }
12581 }
12582
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012583 // Tracking AF
12584 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12585 uint8_t trackingAfTrigger =
12586 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12588 trackingAfTrigger)) {
12589 rc = BAD_VALUE;
12590 }
12591 }
12592
Thierry Strudel3d639192016-09-09 11:52:26 -070012593 return rc;
12594}
12595
12596/*===========================================================================
12597 * FUNCTION : captureResultCb
12598 *
12599 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12600 *
12601 * PARAMETERS :
12602 * @frame : frame information from mm-camera-interface
12603 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12604 * @userdata: userdata
12605 *
12606 * RETURN : NONE
12607 *==========================================================================*/
12608void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12609 camera3_stream_buffer_t *buffer,
12610 uint32_t frame_number, bool isInputBuffer, void *userdata)
12611{
12612 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12613 if (hw == NULL) {
12614 LOGE("Invalid hw %p", hw);
12615 return;
12616 }
12617
12618 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12619 return;
12620}
12621
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012622/*===========================================================================
12623 * FUNCTION : setBufferErrorStatus
12624 *
12625 * DESCRIPTION: Callback handler for channels to report any buffer errors
12626 *
12627 * PARAMETERS :
12628 * @ch : Channel on which buffer error is reported from
12629 * @frame_number : frame number on which buffer error is reported on
12630 * @buffer_status : buffer error status
12631 * @userdata: userdata
12632 *
12633 * RETURN : NONE
12634 *==========================================================================*/
12635void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12636 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12637{
12638 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12639 if (hw == NULL) {
12640 LOGE("Invalid hw %p", hw);
12641 return;
12642 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012643
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012644 hw->setBufferErrorStatus(ch, frame_number, err);
12645 return;
12646}
12647
12648void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12649 uint32_t frameNumber, camera3_buffer_status_t err)
12650{
12651 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12652 pthread_mutex_lock(&mMutex);
12653
12654 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12655 if (req.frame_number != frameNumber)
12656 continue;
12657 for (auto& k : req.mPendingBufferList) {
12658 if(k.stream->priv == ch) {
12659 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12660 }
12661 }
12662 }
12663
12664 pthread_mutex_unlock(&mMutex);
12665 return;
12666}
Thierry Strudel3d639192016-09-09 11:52:26 -070012667/*===========================================================================
12668 * FUNCTION : initialize
12669 *
12670 * DESCRIPTION: Pass framework callback pointers to HAL
12671 *
12672 * PARAMETERS :
12673 * @device : camera3 device handle
12674 * @callback_ops : framework callback function pointers
12675 * RETURN : Success : 0
12676 * Failure: -ENODEV
12677 *==========================================================================*/
12678
12679int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12680 const camera3_callback_ops_t *callback_ops)
12681{
12682 LOGD("E");
12683 QCamera3HardwareInterface *hw =
12684 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12685 if (!hw) {
12686 LOGE("NULL camera device");
12687 return -ENODEV;
12688 }
12689
12690 int rc = hw->initialize(callback_ops);
12691 LOGD("X");
12692 return rc;
12693}
12694
12695/*===========================================================================
12696 * FUNCTION : configure_streams
12697 *
12698 * DESCRIPTION: camera3_device_ops entry point; forwards the requested
12699 *              stream list to configureStreams()
12700 * PARAMETERS :
12701 * @device : camera3 device handle
12702 * @stream_list : set of streams to be configured
12703 * RETURN : Success: 0
12704 * Failure: -EINVAL (if stream configuration is invalid)
12705 * -ENODEV (fatal error)
12706 *==========================================================================*/
12707
12708int QCamera3HardwareInterface::configure_streams(
12709 const struct camera3_device *device,
12710 camera3_stream_configuration_t *stream_list)
12711{
12712 LOGD("E");
12713 QCamera3HardwareInterface *hw =
12714 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12715 if (!hw) {
12716 LOGE("NULL camera device");
12717 return -ENODEV;
12718 }
12719 int rc = hw->configureStreams(stream_list);
12720 LOGD("X");
12721 return rc;
12722}
12723
12724/*===========================================================================
12725 * FUNCTION : construct_default_request_settings
12726 *
12727 * DESCRIPTION: Configure a settings buffer to meet the required use case
12728 *
12729 * PARAMETERS :
12730 * @device : camera3 device handle
12731 * @type : capture intent template type (CAMERA3_TEMPLATE_*)
12732 * RETURN : Success: Return valid metadata
12733 * Failure: Return NULL
12734 *==========================================================================*/
12735const camera_metadata_t* QCamera3HardwareInterface::
12736 construct_default_request_settings(const struct camera3_device *device,
12737 int type)
12738{
12739
12740 LOGD("E");
12741 camera_metadata_t* fwk_metadata = NULL;
12742 QCamera3HardwareInterface *hw =
12743 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12744 if (!hw) {
12745 LOGE("NULL camera device");
12746 return NULL;
12747 }
12748
12749 fwk_metadata = hw->translateCapabilityToMetadata(type);
12750
12751 LOGD("X");
12752 return fwk_metadata;
12753}
12754
12755/*===========================================================================
12756 * FUNCTION : process_capture_request
12757 *
12758 * DESCRIPTION: camera3_device_ops entry point for a capture request;
12759 *              hands the request over to orchestrateRequest()
12760 * PARAMETERS :
12761 * @device : camera3 device handle
12762 * @request : capture request to process
12763 * RETURN : 0 on success, negative error code on failure
12764 *==========================================================================*/
12765int QCamera3HardwareInterface::process_capture_request(
12766 const struct camera3_device *device,
12767 camera3_capture_request_t *request)
12768{
12769 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012770 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012771 QCamera3HardwareInterface *hw =
12772 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12773 if (!hw) {
12774 LOGE("NULL camera device");
12775 return -EINVAL;
12776 }
12777
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012778 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012779 LOGD("X");
12780 return rc;
12781}
12782
12783/*===========================================================================
12784 * FUNCTION : dump
12785 *
12786 * DESCRIPTION: camera3_device_ops dump entry point; re-reads the log level
12787 *              property and dumps HAL state to the given file descriptor
12788 * PARAMETERS :
12789 * @device : camera3 device handle
12790 * @fd : file descriptor to dump into
12791 * RETURN : None
12792 *==========================================================================*/
12793
12794void QCamera3HardwareInterface::dump(
12795 const struct camera3_device *device, int fd)
12796{
12797 /* Log level property is read when "adb shell dumpsys media.camera" is
12798 called so that the log level can be controlled without restarting
12799 the media server */
12800 getLogLevel();
12801
12802 LOGD("E");
12803 QCamera3HardwareInterface *hw =
12804 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12805 if (!hw) {
12806 LOGE("NULL camera device");
12807 return;
12808 }
12809
12810 hw->dump(fd);
12811 LOGD("X");
12812 return;
12813}
12814
12815/*===========================================================================
12816 * FUNCTION : flush
12817 *
12818 * DESCRIPTION: camera3_device_ops flush entry point; flushes all in-flight
12819 *              requests once the HAL is in the STARTED state
12820 * PARAMETERS :
12821 * @device : camera3 device handle
12822 *
12823 * RETURN : 0 on success, -ENODEV on device error
12824 *==========================================================================*/
12825
12826int QCamera3HardwareInterface::flush(
12827 const struct camera3_device *device)
12828{
12829 int rc;
12830 LOGD("E");
12831 QCamera3HardwareInterface *hw =
12832 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12833 if (!hw) {
12834 LOGE("NULL camera device");
12835 return -EINVAL;
12836 }
12837
12838 pthread_mutex_lock(&hw->mMutex);
12839 // Validate current state
12840 switch (hw->mState) {
12841 case STARTED:
12842 /* valid state */
12843 break;
12844
12845 case ERROR:
12846 pthread_mutex_unlock(&hw->mMutex);
12847 hw->handleCameraDeviceError();
12848 return -ENODEV;
12849
12850 default:
12851 LOGI("Flush returned during state %d", hw->mState);
12852 pthread_mutex_unlock(&hw->mMutex);
12853 return 0;
12854 }
12855 pthread_mutex_unlock(&hw->mMutex);
12856
12857 rc = hw->flush(true /* restart channels */ );
12858 LOGD("X");
12859 return rc;
12860}
12861
12862/*===========================================================================
12863 * FUNCTION : close_camera_device
12864 *
12865 * DESCRIPTION: closes the camera device and deletes the HAL instance
12866 *
12867 * PARAMETERS :
12868 * @device : camera hw device handle to be closed
12869 *
12870 * RETURN : NO_ERROR on success, BAD_VALUE on NULL device
12871 *==========================================================================*/
12872int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12873{
12874 int ret = NO_ERROR;
12875 QCamera3HardwareInterface *hw =
12876 reinterpret_cast<QCamera3HardwareInterface *>(
12877 reinterpret_cast<camera3_device_t *>(device)->priv);
12878 if (!hw) {
12879 LOGE("NULL camera device");
12880 return BAD_VALUE;
12881 }
12882
12883 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12884 delete hw;
12885 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012886 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012887 return ret;
12888}
12889
12890/*===========================================================================
12891 * FUNCTION : getWaveletDenoiseProcessPlate
12892 *
12893 * DESCRIPTION: query wavelet denoise process plate
12894 *
12895 * PARAMETERS : None
12896 *
12897 * RETURN : WNR process plate value
12898 *==========================================================================*/
12899cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12900{
12901 char prop[PROPERTY_VALUE_MAX];
12902 memset(prop, 0, sizeof(prop));
12903 property_get("persist.denoise.process.plates", prop, "0");
12904 int processPlate = atoi(prop);
12905 switch(processPlate) {
12906 case 0:
12907 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12908 case 1:
12909 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12910 case 2:
12911 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12912 case 3:
12913 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12914 default:
12915 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12916 }
12917}
12918
12919
12920/*===========================================================================
12921 * FUNCTION : getTemporalDenoiseProcessPlate
12922 *
12923 * DESCRIPTION: query temporal denoise process plate
12924 *
12925 * PARAMETERS : None
12926 *
12927 * RETURN : TNR process plate value
12928 *==========================================================================*/
12929cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12930{
12931 char prop[PROPERTY_VALUE_MAX];
12932 memset(prop, 0, sizeof(prop));
12933 property_get("persist.tnr.process.plates", prop, "0");
12934 int processPlate = atoi(prop);
12935 switch(processPlate) {
12936 case 0:
12937 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12938 case 1:
12939 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12940 case 2:
12941 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12942 case 3:
12943 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12944 default:
12945 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12946 }
12947}
12948
12949
12950/*===========================================================================
12951 * FUNCTION : extractSceneMode
12952 *
12953 * DESCRIPTION: Extract scene mode from frameworks set metadata
12954 *
12955 * PARAMETERS :
12956 * @frame_settings: CameraMetadata reference
12957 * @metaMode: ANDROID_CONTROL_MODE setting from the framework
12958 * @hal_metadata: hal metadata structure
12959 *
12960 * RETURN : int32_t type of status (NO_ERROR on success)
12961 *==========================================================================*/
12962int32_t QCamera3HardwareInterface::extractSceneMode(
12963 const CameraMetadata &frame_settings, uint8_t metaMode,
12964 metadata_buffer_t *hal_metadata)
12965{
12966 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012967 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12968
12969 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12970 LOGD("Ignoring control mode OFF_KEEP_STATE");
12971 return NO_ERROR;
12972 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012973
12974 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12975 camera_metadata_ro_entry entry =
12976 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12977 if (0 == entry.count)
12978 return rc;
12979
12980 uint8_t fwk_sceneMode = entry.data.u8[0];
12981
12982 int val = lookupHalName(SCENE_MODES_MAP,
12983 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
12984 fwk_sceneMode);
12985 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012986 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070012987 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070012988 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012989 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012990
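    // HDR scene mode: engage sensor-side HDR when the platform selects one via
    // setSensorHDR(); only if sensor HDR is not enabled fall back to the
    // multi-frame HDR bracketing configured below.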
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012991 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
12992 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
12993 }
12994
12995 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
12996 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012997 cam_hdr_param_t hdr_params;
12998 hdr_params.hdr_enable = 1;
12999 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13000 hdr_params.hdr_need_1x = false;
13001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13002 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13003 rc = BAD_VALUE;
13004 }
13005 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013006
Thierry Strudel3d639192016-09-09 11:52:26 -070013007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13008 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13009 rc = BAD_VALUE;
13010 }
13011 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013012
13013 if (mForceHdrSnapshot) {
13014 cam_hdr_param_t hdr_params;
13015 hdr_params.hdr_enable = 1;
13016 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13017 hdr_params.hdr_need_1x = false;
13018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13019 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13020 rc = BAD_VALUE;
13021 }
13022 }
13023
Thierry Strudel3d639192016-09-09 11:52:26 -070013024 return rc;
13025}
13026
13027/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013028 * FUNCTION : setVideoHdrMode
13029 *
13030 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13031 *
13032 * PARAMETERS :
13033 * @hal_metadata: hal metadata structure
13034 * @vhdr : requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13035 *
13036 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on invalid mode)
13037 *==========================================================================*/
13038int32_t QCamera3HardwareInterface::setVideoHdrMode(
13039 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13040{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013041 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13042 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13043 }
13044
13045 LOGE("Invalid Video HDR mode %d!", vhdr);
13046 return BAD_VALUE;
13047}
13048
13049/*===========================================================================
13050 * FUNCTION : setSensorHDR
13051 *
13052 * DESCRIPTION: Enable/disable sensor HDR.
13053 *
13054 * PARAMETERS :
13055 * @hal_metadata: hal metadata structure
13056 * @enable: boolean whether to enable/disable sensor HDR
13057 * @isVideoHdrEnable: true when called for video HDR; skips updating m_bSensorHDREnabled
13058 * RETURN : int32_t type of status (NO_ERROR on success)
13059 *==========================================================================*/
13060int32_t QCamera3HardwareInterface::setSensorHDR(
13061 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13062{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013063 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013064 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13065
13066 if (enable) {
13067 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13068 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13069 #ifdef _LE_CAMERA_
13070 //Default to staggered HDR for IOT
13071 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13072 #else
13073 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13074 #endif
13075 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13076 }
13077
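    // The persist.camera.sensor.hdr value is applied directly as a
    // cam_sensor_hdr_type_t; the requested type is honored below only if the
    // matching capability bit is advertised in qcom_supported_feature_mask.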
13078 bool isSupported = false;
13079 switch (sensor_hdr) {
13080 case CAM_SENSOR_HDR_IN_SENSOR:
13081 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13082 CAM_QCOM_FEATURE_SENSOR_HDR) {
13083 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013084 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013085 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013086 break;
13087 case CAM_SENSOR_HDR_ZIGZAG:
13088 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13089 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13090 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013091 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013092 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013093 break;
13094 case CAM_SENSOR_HDR_STAGGERED:
13095 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13096 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13097 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013098 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013099 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013100 break;
13101 case CAM_SENSOR_HDR_OFF:
13102 isSupported = true;
13103 LOGD("Turning off sensor HDR");
13104 break;
13105 default:
13106 LOGE("HDR mode %d not supported", sensor_hdr);
13107 rc = BAD_VALUE;
13108 break;
13109 }
13110
13111 if(isSupported) {
13112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13113 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13114 rc = BAD_VALUE;
13115 } else {
13116 if(!isVideoHdrEnable)
13117 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013118 }
13119 }
13120 return rc;
13121}
13122
13123/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013124 * FUNCTION : needRotationReprocess
13125 *
13126 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13127 *
13128 * PARAMETERS : none
13129 *
13130 * RETURN : true: needed
13131 * false: no need
13132 *==========================================================================*/
13133bool QCamera3HardwareInterface::needRotationReprocess()
13134{
13135 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13136 // pp module has the capability to process rotation, so do rotation via reprocess
13137 LOGH("need do reprocess for rotation");
13138 return true;
13139 }
13140
13141 return false;
13142}
13143
13144/*===========================================================================
13145 * FUNCTION : needReprocess
13146 *
13147 * DESCRIPTION: if reprocess is needed
13148 *
13149 * PARAMETERS : none
13150 *
13151 * RETURN : true: needed
13152 * false: no need
13153 *==========================================================================*/
13154bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13155{
13156 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13157 // TODO: add for ZSL HDR later
13158 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13159 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13160 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13161 return true;
13162 } else {
13163 LOGH("already post processed frame");
13164 return false;
13165 }
13166 }
13167 return needRotationReprocess();
13168}
13169
13170/*===========================================================================
13171 * FUNCTION : needJpegExifRotation
13172 *
13173 * DESCRIPTION: if rotation from jpeg is needed
13174 *
13175 * PARAMETERS : none
13176 *
13177 * RETURN : true: needed
13178 * false: no need
13179 *==========================================================================*/
13180bool QCamera3HardwareInterface::needJpegExifRotation()
13181{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013182 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013183 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13184 LOGD("Need use Jpeg EXIF Rotation");
13185 return true;
13186 }
13187 return false;
13188}
13189
13190/*===========================================================================
13191 * FUNCTION : addOfflineReprocChannel
13192 *
13193 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13194 * coming from input channel
13195 *
13196 * PARAMETERS :
13197 * @config : reprocess configuration
13198 * @inputChHandle : pointer to the input (source) channel
13199 *
13200 *
13201 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13202 *==========================================================================*/
13203QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13204 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13205{
13206 int32_t rc = NO_ERROR;
13207 QCamera3ReprocessChannel *pChannel = NULL;
13208
13209 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013210 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13211 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013212 if (NULL == pChannel) {
13213 LOGE("no mem for reprocess channel");
13214 return NULL;
13215 }
13216
13217 rc = pChannel->initialize(IS_TYPE_NONE);
13218 if (rc != NO_ERROR) {
13219 LOGE("init reprocess channel failed, ret = %d", rc);
13220 delete pChannel;
13221 return NULL;
13222 }
13223
13224 // pp feature config
13225 cam_pp_feature_config_t pp_config;
13226 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13227
13228 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13229 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13230 & CAM_QCOM_FEATURE_DSDN) {
13231 //Use CPP CDS incase h/w supports it.
13232 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13233 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13234 }
13235 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13236 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13237 }
13238
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013239 if (config.hdr_param.hdr_enable) {
13240 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13241 pp_config.hdr_param = config.hdr_param;
13242 }
13243
13244 if (mForceHdrSnapshot) {
13245 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13246 pp_config.hdr_param.hdr_enable = 1;
13247 pp_config.hdr_param.hdr_need_1x = 0;
13248 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13249 }
13250
Thierry Strudel3d639192016-09-09 11:52:26 -070013251 rc = pChannel->addReprocStreamsFromSource(pp_config,
13252 config,
13253 IS_TYPE_NONE,
13254 mMetadataChannel);
13255
13256 if (rc != NO_ERROR) {
13257 delete pChannel;
13258 return NULL;
13259 }
13260 return pChannel;
13261}
13262
13263/*===========================================================================
13264 * FUNCTION : getMobicatMask
13265 *
13266 * DESCRIPTION: returns mobicat mask
13267 *
13268 * PARAMETERS : none
13269 *
13270 * RETURN : mobicat mask
13271 *
13272 *==========================================================================*/
13273uint8_t QCamera3HardwareInterface::getMobicatMask()
13274{
13275 return m_MobicatMask;
13276}
13277
13278/*===========================================================================
13279 * FUNCTION : setMobicat
13280 *
13281 * DESCRIPTION: set Mobicat on/off.
13282 *
13283 * PARAMETERS :
13284 * @params : none
13285 *
13286 * RETURN : int32_t type of status
13287 * NO_ERROR -- success
13288 * none-zero failure code
13289 *==========================================================================*/
13290int32_t QCamera3HardwareInterface::setMobicat()
13291{
13292 char value [PROPERTY_VALUE_MAX];
13293 property_get("persist.camera.mobicat", value, "0");
13294 int32_t ret = NO_ERROR;
13295 uint8_t enableMobi = (uint8_t)atoi(value);
13296
13297 if (enableMobi) {
13298 tune_cmd_t tune_cmd;
13299 tune_cmd.type = SET_RELOAD_CHROMATIX;
13300 tune_cmd.module = MODULE_ALL;
13301 tune_cmd.value = TRUE;
13302 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13303 CAM_INTF_PARM_SET_VFE_COMMAND,
13304 tune_cmd);
13305
13306 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13307 CAM_INTF_PARM_SET_PP_COMMAND,
13308 tune_cmd);
13309 }
13310 m_MobicatMask = enableMobi;
13311
13312 return ret;
13313}
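
// Usage note (illustrative): Mobicat is driven purely by the persist.camera.mobicat
// property read above, e.g. during bring-up:
//
//     adb shell setprop persist.camera.mobicat 1   # enable
//     adb shell setprop persist.camera.mobicat 0   # disable (default)
//
// The property is sampled when setMobicat() runs, so the camera session normally
// has to be reopened for a change to take effect.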
13314
13315/*===========================================================================
13316* FUNCTION : getLogLevel
13317*
13318* DESCRIPTION: Reads the log level property into a variable
13319*
13320* PARAMETERS :
13321* None
13322*
13323* RETURN :
13324* None
13325*==========================================================================*/
13326void QCamera3HardwareInterface::getLogLevel()
13327{
13328 char prop[PROPERTY_VALUE_MAX];
13329 uint32_t globalLogLevel = 0;
13330
13331 property_get("persist.camera.hal.debug", prop, "0");
13332 int val = atoi(prop);
13333 if (0 <= val) {
13334 gCamHal3LogLevel = (uint32_t)val;
13335 }
13336
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013337 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013338 gKpiDebugLevel = atoi(prop);
13339
13340 property_get("persist.camera.global.debug", prop, "0");
13341 val = atoi(prop);
13342 if (0 <= val) {
13343 globalLogLevel = (uint32_t)val;
13344 }
13345
13346 /* The higher of persist.camera.hal.debug and persist.camera.global.debug is selected */
13347 if (gCamHal3LogLevel < globalLogLevel)
13348 gCamHal3LogLevel = globalLogLevel;
13349
13350 return;
13351}
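
// Worked example of the selection above: the effective HAL log level is the maximum
// of the two properties, e.g.
//
//     adb shell setprop persist.camera.hal.debug 0
//     adb shell setprop persist.camera.global.debug 3
//
// leaves gCamHal3LogLevel at 3 because the global level is higher.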
13352
13353/*===========================================================================
13354 * FUNCTION : validateStreamRotations
13355 *
13356 * DESCRIPTION: Check if the rotations requested are supported
13357 *
13358 * PARAMETERS :
13359 * @stream_list : streams to be configured
13360 *
13361 * RETURN : NO_ERROR on success
13362 * -EINVAL on failure
13363 *
13364 *==========================================================================*/
13365int QCamera3HardwareInterface::validateStreamRotations(
13366 camera3_stream_configuration_t *streamList)
13367{
13368 int rc = NO_ERROR;
13369
13370 /*
13371 * Loop through all streams requested in configuration
13372 * Check if unsupported rotations have been requested on any of them
13373 */
13374 for (size_t j = 0; j < streamList->num_streams; j++){
13375 camera3_stream_t *newStream = streamList->streams[j];
13376
13377 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13378 bool isImplDef = (newStream->format ==
13379 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13380 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13381 isImplDef);
13382
13383 if (isRotated && (!isImplDef || isZsl)) {
13384 LOGE("Error: Unsupported rotation of %d requested for stream"
13385                 " type:%d and stream format:%d",
13386 newStream->rotation, newStream->stream_type,
13387 newStream->format);
13388 rc = -EINVAL;
13389 break;
13390 }
13391 }
13392
13393 return rc;
13394}
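
// Illustrative example of the rule enforced above: a non-zero rotation is only
// accepted on an implementation-defined, non-ZSL output stream. A configuration
// such as the sketch below would be rejected with -EINVAL because the rotated
// stream uses a YCbCr format rather than IMPLEMENTATION_DEFINED:
//
//     camera3_stream_t s = {};
//     s.stream_type = CAMERA3_STREAM_OUTPUT;
//     s.format      = HAL_PIXEL_FORMAT_YCbCr_420_888;
//     s.rotation    = CAMERA3_STREAM_ROTATION_90;   // not supported here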
13395
13396/*===========================================================================
13397* FUNCTION : getFlashInfo
13398*
13399* DESCRIPTION: Retrieve information about whether the device has a flash.
13400*
13401* PARAMETERS :
13402* @cameraId : Camera id to query
13403* @hasFlash : Boolean indicating whether there is a flash device
13404* associated with given camera
13405* @flashNode : If a flash device exists, this will be its device node.
13406*
13407* RETURN :
13408* None
13409*==========================================================================*/
13410void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13411 bool& hasFlash,
13412 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13413{
13414 cam_capability_t* camCapability = gCamCapability[cameraId];
13415 if (NULL == camCapability) {
13416 hasFlash = false;
13417 flashNode[0] = '\0';
13418 } else {
13419 hasFlash = camCapability->flash_available;
13420 strlcpy(flashNode,
13421 (char*)camCapability->flash_dev_name,
13422 QCAMERA_MAX_FILEPATH_LENGTH);
13423 }
13424}
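
// Illustrative usage sketch, assuming the camera id has already been probed so
// that gCamCapability[cameraId] is populated ("cameraId" is a placeholder):
//
//     bool hasFlash = false;
//     char node[QCAMERA_MAX_FILEPATH_LENGTH] = {};
//     QCamera3HardwareInterface::getFlashInfo(cameraId, hasFlash, node);
//     if (hasFlash) { /* node now holds the flash device path */ }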
13425
13426/*===========================================================================
13427* FUNCTION : getEepromVersionInfo
13428*
13429* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13430*
13431* PARAMETERS : None
13432*
13433* RETURN : string describing EEPROM version
13434* "\0" if no such info available
13435*==========================================================================*/
13436const char *QCamera3HardwareInterface::getEepromVersionInfo()
13437{
13438 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13439}
13440
13441/*===========================================================================
13442* FUNCTION : getLdafCalib
13443*
13444* DESCRIPTION: Retrieve Laser AF calibration data
13445*
13446* PARAMETERS : None
13447*
13448 * RETURN : Pointer to two uint32_t values describing laser AF calibration data
13449* NULL if none is available.
13450*==========================================================================*/
13451const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13452{
13453 if (mLdafCalibExist) {
13454 return &mLdafCalib[0];
13455 } else {
13456 return NULL;
13457 }
13458}
13459
13460/*===========================================================================
13461 * FUNCTION : dynamicUpdateMetaStreamInfo
13462 *
13463 * DESCRIPTION: This function:
13464 * (1) stops all the channels
13465 * (2) returns error on pending requests and buffers
13466 * (3) sends metastream_info in setparams
13467 * (4) starts all channels
13468 * This is useful when the sensor has to be restarted to apply
13469 * settings, such as the frame rate, from a different sensor mode
13470 *
13471 * PARAMETERS : None
13472 *
13473 * RETURN : NO_ERROR on success
13474 * Error codes on failure
13475 *
13476 *==========================================================================*/
13477int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13478{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013479 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013480 int rc = NO_ERROR;
13481
13482 LOGD("E");
13483
13484 rc = stopAllChannels();
13485 if (rc < 0) {
13486 LOGE("stopAllChannels failed");
13487 return rc;
13488 }
13489
13490 rc = notifyErrorForPendingRequests();
13491 if (rc < 0) {
13492 LOGE("notifyErrorForPendingRequests failed");
13493 return rc;
13494 }
13495
13496 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13497 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13498                 ", Format:%d",
13499 mStreamConfigInfo.type[i],
13500 mStreamConfigInfo.stream_sizes[i].width,
13501 mStreamConfigInfo.stream_sizes[i].height,
13502 mStreamConfigInfo.postprocess_mask[i],
13503 mStreamConfigInfo.format[i]);
13504 }
13505
13506 /* Send meta stream info once again so that ISP can start */
13507 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13508 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13509 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13510 mParameters);
13511 if (rc < 0) {
13512 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13513 }
13514
13515 rc = startAllChannels();
13516 if (rc < 0) {
13517 LOGE("startAllChannels failed");
13518 return rc;
13519 }
13520
13521 LOGD("X");
13522 return rc;
13523}
13524
13525/*===========================================================================
13526 * FUNCTION : stopAllChannels
13527 *
13528 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13529 *
13530 * PARAMETERS : None
13531 *
13532 * RETURN : NO_ERROR on success
13533 * Error codes on failure
13534 *
13535 *==========================================================================*/
13536int32_t QCamera3HardwareInterface::stopAllChannels()
13537{
13538 int32_t rc = NO_ERROR;
13539
13540 LOGD("Stopping all channels");
13541 // Stop the Streams/Channels
13542 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13543 it != mStreamInfo.end(); it++) {
13544 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13545 if (channel) {
13546 channel->stop();
13547 }
13548 (*it)->status = INVALID;
13549 }
13550
13551 if (mSupportChannel) {
13552 mSupportChannel->stop();
13553 }
13554 if (mAnalysisChannel) {
13555 mAnalysisChannel->stop();
13556 }
13557 if (mRawDumpChannel) {
13558 mRawDumpChannel->stop();
13559 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013560 if (mHdrPlusRawSrcChannel) {
13561 mHdrPlusRawSrcChannel->stop();
13562 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013563 if (mMetadataChannel) {
13564         /* If mStreamInfo is not empty, the metadata stream exists */
13565 mMetadataChannel->stop();
13566 }
13567
13568 LOGD("All channels stopped");
13569 return rc;
13570}
13571
13572/*===========================================================================
13573 * FUNCTION : startAllChannels
13574 *
13575 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13576 *
13577 * PARAMETERS : None
13578 *
13579 * RETURN : NO_ERROR on success
13580 * Error codes on failure
13581 *
13582 *==========================================================================*/
13583int32_t QCamera3HardwareInterface::startAllChannels()
13584{
13585 int32_t rc = NO_ERROR;
13586
13587 LOGD("Start all channels ");
13588 // Start the Streams/Channels
13589 if (mMetadataChannel) {
13590         /* If mStreamInfo is not empty, the metadata stream exists */
13591 rc = mMetadataChannel->start();
13592 if (rc < 0) {
13593 LOGE("META channel start failed");
13594 return rc;
13595 }
13596 }
13597 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13598 it != mStreamInfo.end(); it++) {
13599 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13600 if (channel) {
13601 rc = channel->start();
13602 if (rc < 0) {
13603 LOGE("channel start failed");
13604 return rc;
13605 }
13606 }
13607 }
13608 if (mAnalysisChannel) {
13609 mAnalysisChannel->start();
13610 }
13611 if (mSupportChannel) {
13612 rc = mSupportChannel->start();
13613 if (rc < 0) {
13614 LOGE("Support channel start failed");
13615 return rc;
13616 }
13617 }
13618 if (mRawDumpChannel) {
13619 rc = mRawDumpChannel->start();
13620 if (rc < 0) {
13621 LOGE("RAW dump channel start failed");
13622 return rc;
13623 }
13624 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013625 if (mHdrPlusRawSrcChannel) {
13626 rc = mHdrPlusRawSrcChannel->start();
13627 if (rc < 0) {
13628 LOGE("HDR+ RAW channel start failed");
13629 return rc;
13630 }
13631 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013632
13633 LOGD("All channels started");
13634 return rc;
13635}
13636
13637/*===========================================================================
13638 * FUNCTION : notifyErrorForPendingRequests
13639 *
13640 * DESCRIPTION: This function sends error for all the pending requests/buffers
13641 *
13642 * PARAMETERS : None
13643 *
13644 * RETURN : Error codes
13645 * NO_ERROR on success
13646 *
13647 *==========================================================================*/
13648int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13649{
13650 int32_t rc = NO_ERROR;
13651 unsigned int frameNum = 0;
13652 camera3_capture_result_t result;
13653 camera3_stream_buffer_t *pStream_Buf = NULL;
13654
13655 memset(&result, 0, sizeof(camera3_capture_result_t));
13656
13657 if (mPendingRequestsList.size() > 0) {
13658 pendingRequestIterator i = mPendingRequestsList.begin();
13659 frameNum = i->frame_number;
13660 } else {
13661 /* There might still be pending buffers even though there are
13662 no pending requests. Setting the frameNum to MAX so that
13663 all the buffers with smaller frame numbers are returned */
13664 frameNum = UINT_MAX;
13665 }
13666
13667 LOGH("Oldest frame num on mPendingRequestsList = %u",
13668 frameNum);
13669
Emilian Peev7650c122017-01-19 08:24:33 -080013670 notifyErrorFoPendingDepthData(mDepthChannel);
13671
Thierry Strudel3d639192016-09-09 11:52:26 -070013672 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13673 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13674
13675 if (req->frame_number < frameNum) {
13676 // Send Error notify to frameworks for each buffer for which
13677 // metadata buffer is already sent
13678 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13679 req->frame_number, req->mPendingBufferList.size());
13680
13681 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13682 if (NULL == pStream_Buf) {
13683 LOGE("No memory for pending buffers array");
13684 return NO_MEMORY;
13685 }
13686 memset(pStream_Buf, 0,
13687 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13688 result.result = NULL;
13689 result.frame_number = req->frame_number;
13690 result.num_output_buffers = req->mPendingBufferList.size();
13691 result.output_buffers = pStream_Buf;
13692
13693 size_t index = 0;
13694 for (auto info = req->mPendingBufferList.begin();
13695 info != req->mPendingBufferList.end(); ) {
13696
13697 camera3_notify_msg_t notify_msg;
13698 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13699 notify_msg.type = CAMERA3_MSG_ERROR;
13700 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13701 notify_msg.message.error.error_stream = info->stream;
13702 notify_msg.message.error.frame_number = req->frame_number;
13703 pStream_Buf[index].acquire_fence = -1;
13704 pStream_Buf[index].release_fence = -1;
13705 pStream_Buf[index].buffer = info->buffer;
13706 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13707 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013708 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013709 index++;
13710 // Remove buffer from list
13711 info = req->mPendingBufferList.erase(info);
13712 }
13713
13714 // Remove this request from Map
13715 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13716 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13717 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13718
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013719 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013720
13721 delete [] pStream_Buf;
13722 } else {
13723
13724 // Go through the pending requests info and send error request to framework
13725 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13726
13727 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13728
13729 // Send error notify to frameworks
13730 camera3_notify_msg_t notify_msg;
13731 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13732 notify_msg.type = CAMERA3_MSG_ERROR;
13733 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13734 notify_msg.message.error.error_stream = NULL;
13735 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013736 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013737
13738 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13739 if (NULL == pStream_Buf) {
13740 LOGE("No memory for pending buffers array");
13741 return NO_MEMORY;
13742 }
13743 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13744
13745 result.result = NULL;
13746 result.frame_number = req->frame_number;
13747 result.input_buffer = i->input_buffer;
13748 result.num_output_buffers = req->mPendingBufferList.size();
13749 result.output_buffers = pStream_Buf;
13750
13751 size_t index = 0;
13752 for (auto info = req->mPendingBufferList.begin();
13753 info != req->mPendingBufferList.end(); ) {
13754 pStream_Buf[index].acquire_fence = -1;
13755 pStream_Buf[index].release_fence = -1;
13756 pStream_Buf[index].buffer = info->buffer;
13757 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13758 pStream_Buf[index].stream = info->stream;
13759 index++;
13760 // Remove buffer from list
13761 info = req->mPendingBufferList.erase(info);
13762 }
13763
13764 // Remove this request from Map
13765 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13766 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13767 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13768
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013769 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013770 delete [] pStream_Buf;
13771 i = erasePendingRequest(i);
13772 }
13773 }
13774
13775 /* Reset pending frame Drop list and requests list */
13776 mPendingFrameDropList.clear();
13777
13778 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13779 req.mPendingBufferList.clear();
13780 }
13781 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013782 LOGH("Cleared all the pending buffers ");
13783
13784 return rc;
13785}
13786
13787bool QCamera3HardwareInterface::isOnEncoder(
13788 const cam_dimension_t max_viewfinder_size,
13789 uint32_t width, uint32_t height)
13790{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013791 return ((width > (uint32_t)max_viewfinder_size.width) ||
13792 (height > (uint32_t)max_viewfinder_size.height) ||
13793 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13794 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013795}
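
// Worked example for the predicate above: with a max viewfinder size of 1920x1080,
// a 3840x2160 stream is on the encoder path (both dimensions exceed the viewfinder
// limit), while a 1280x720 stream is not.
//
//     cam_dimension_t maxVf = {1920, 1080};
//     isOnEncoder(maxVf, 3840, 2160);   // true
//     isOnEncoder(maxVf, 1280, 720);    // false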
13796
13797/*===========================================================================
13798 * FUNCTION : setBundleInfo
13799 *
13800 * DESCRIPTION: Set bundle info for all streams that are bundle.
13801 *
13802 * PARAMETERS : None
13803 *
13804 * RETURN : NO_ERROR on success
13805 * Error codes on failure
13806 *==========================================================================*/
13807int32_t QCamera3HardwareInterface::setBundleInfo()
13808{
13809 int32_t rc = NO_ERROR;
13810
13811 if (mChannelHandle) {
13812 cam_bundle_config_t bundleInfo;
13813 memset(&bundleInfo, 0, sizeof(bundleInfo));
13814 rc = mCameraHandle->ops->get_bundle_info(
13815 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13816 if (rc != NO_ERROR) {
13817 LOGE("get_bundle_info failed");
13818 return rc;
13819 }
13820 if (mAnalysisChannel) {
13821 mAnalysisChannel->setBundleInfo(bundleInfo);
13822 }
13823 if (mSupportChannel) {
13824 mSupportChannel->setBundleInfo(bundleInfo);
13825 }
13826 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13827 it != mStreamInfo.end(); it++) {
13828 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13829 channel->setBundleInfo(bundleInfo);
13830 }
13831 if (mRawDumpChannel) {
13832 mRawDumpChannel->setBundleInfo(bundleInfo);
13833 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013834 if (mHdrPlusRawSrcChannel) {
13835 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13836 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013837 }
13838
13839 return rc;
13840}
13841
13842/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013843 * FUNCTION : setInstantAEC
13844 *
13845 * DESCRIPTION: Set Instant AEC related params.
13846 *
13847 * PARAMETERS :
13848 * @meta: CameraMetadata reference
13849 *
13850 * RETURN : NO_ERROR on success
13851 * Error codes on failure
13852 *==========================================================================*/
13853int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13854{
13855 int32_t rc = NO_ERROR;
13856 uint8_t val = 0;
13857 char prop[PROPERTY_VALUE_MAX];
13858
13859 // First try to configure instant AEC from framework metadata
13860 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13861 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13862 }
13863
13864 // If framework did not set this value, try to read from set prop.
13865 if (val == 0) {
13866 memset(prop, 0, sizeof(prop));
13867 property_get("persist.camera.instant.aec", prop, "0");
13868 val = (uint8_t)atoi(prop);
13869 }
13870
13871 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13872 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13873 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13874 mInstantAEC = val;
13875 mInstantAECSettledFrameNumber = 0;
13876 mInstantAecFrameIdxCount = 0;
13877 LOGH("instantAEC value set %d",val);
13878 if (mInstantAEC) {
13879 memset(prop, 0, sizeof(prop));
13880 property_get("persist.camera.ae.instant.bound", prop, "10");
13881 int32_t aec_frame_skip_cnt = atoi(prop);
13882 if (aec_frame_skip_cnt >= 0) {
13883 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13884 } else {
13885 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13886 rc = BAD_VALUE;
13887 }
13888 }
13889 } else {
13890 LOGE("Bad instant aec value set %d", val);
13891 rc = BAD_VALUE;
13892 }
13893 return rc;
13894}
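
// Illustrative sketch of how the framework side can request instant AEC via the
// vendor tag checked above. "settings" is a hypothetical CameraMetadata object;
// the numeric value must be a convergence mode below CAM_AEC_CONVERGENCE_MAX:
//
//     CameraMetadata settings;
//     int32_t aecMode = 1;   // an instant-AEC convergence mode (assumption: 1 is valid)
//     settings.update(QCAMERA3_INSTANT_AEC_MODE, &aecMode, 1);
//     setInstantAEC(settings);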
13895
13896/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013897 * FUNCTION : get_num_overall_buffers
13898 *
13899 * DESCRIPTION: Return the total number of pending buffers across all requests.
13900 *
13901 * PARAMETERS : None
13902 *
13903 * RETURN : Number of overall pending buffers
13904 *
13905 *==========================================================================*/
13906uint32_t PendingBuffersMap::get_num_overall_buffers()
13907{
13908 uint32_t sum_buffers = 0;
13909 for (auto &req : mPendingBuffersInRequest) {
13910 sum_buffers += req.mPendingBufferList.size();
13911 }
13912 return sum_buffers;
13913}
13914
13915/*===========================================================================
13916 * FUNCTION : removeBuf
13917 *
13918 * DESCRIPTION: Remove a matching buffer from tracker.
13919 *
13920 * PARAMETERS : @buffer: image buffer for the callback
13921 *
13922 * RETURN : None
13923 *
13924 *==========================================================================*/
13925void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13926{
13927 bool buffer_found = false;
13928 for (auto req = mPendingBuffersInRequest.begin();
13929 req != mPendingBuffersInRequest.end(); req++) {
13930 for (auto k = req->mPendingBufferList.begin();
13931 k != req->mPendingBufferList.end(); k++ ) {
13932 if (k->buffer == buffer) {
13933 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13934 req->frame_number, buffer);
13935 k = req->mPendingBufferList.erase(k);
13936 if (req->mPendingBufferList.empty()) {
13937 // Remove this request from Map
13938 req = mPendingBuffersInRequest.erase(req);
13939 }
13940 buffer_found = true;
13941 break;
13942 }
13943 }
13944 if (buffer_found) {
13945 break;
13946 }
13947 }
13948 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13949 get_num_overall_buffers());
13950}
13951
13952/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013953 * FUNCTION : getBufErrStatus
13954 *
13955 * DESCRIPTION: get buffer error status
13956 *
13957 * PARAMETERS : @buffer: buffer handle
13958 *
13959 * RETURN : Error status
13960 *
13961 *==========================================================================*/
13962int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13963{
13964 for (auto& req : mPendingBuffersInRequest) {
13965 for (auto& k : req.mPendingBufferList) {
13966 if (k.buffer == buffer)
13967 return k.bufStatus;
13968 }
13969 }
13970 return CAMERA3_BUFFER_STATUS_OK;
13971}
13972
13973/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013974 * FUNCTION : setPAAFSupport
13975 *
13976 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13977 * feature mask according to stream type and filter
13978 * arrangement
13979 *
13980 * PARAMETERS : @feature_mask: current feature mask, which may be modified
13981 * @stream_type: stream type
13982 * @filter_arrangement: filter arrangement
13983 *
13984 * RETURN : None
13985 *==========================================================================*/
13986void QCamera3HardwareInterface::setPAAFSupport(
13987 cam_feature_mask_t& feature_mask,
13988 cam_stream_type_t stream_type,
13989 cam_color_filter_arrangement_t filter_arrangement)
13990{
Thierry Strudel3d639192016-09-09 11:52:26 -070013991 switch (filter_arrangement) {
13992 case CAM_FILTER_ARRANGEMENT_RGGB:
13993 case CAM_FILTER_ARRANGEMENT_GRBG:
13994 case CAM_FILTER_ARRANGEMENT_GBRG:
13995 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013996 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
13997 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070013998 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080013999 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14000 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014001 }
14002 break;
14003 case CAM_FILTER_ARRANGEMENT_Y:
14004 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14005 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14006 }
14007 break;
14008 default:
14009 break;
14010 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014011 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14012 feature_mask, stream_type, filter_arrangement);
14013
14014
Thierry Strudel3d639192016-09-09 11:52:26 -070014015}
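
// Illustrative example of the rule above: for a Bayer sensor, PAAF is set on
// preview/analysis/video streams (unless the QTI PPEISCORE feature already owns
// the stream), while for a mono (Y-only) sensor it is set only on the analysis
// stream.
//
//     cam_feature_mask_t mask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
//     setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
//     // mask now additionally carries CAM_QCOM_FEATURE_PAAF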
14016
14017/*===========================================================================
14018* FUNCTION : getSensorMountAngle
14019*
14020* DESCRIPTION: Retrieve sensor mount angle
14021*
14022* PARAMETERS : None
14023*
14024* RETURN : sensor mount angle in uint32_t
14025*==========================================================================*/
14026uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14027{
14028 return gCamCapability[mCameraId]->sensor_mount_angle;
14029}
14030
14031/*===========================================================================
14032* FUNCTION : getRelatedCalibrationData
14033*
14034* DESCRIPTION: Retrieve related system calibration data
14035*
14036* PARAMETERS : None
14037*
14038* RETURN : Pointer of related system calibration data
14039*==========================================================================*/
14040const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14041{
14042 return (const cam_related_system_calibration_data_t *)
14043 &(gCamCapability[mCameraId]->related_cam_calibration);
14044}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014045
14046/*===========================================================================
14047 * FUNCTION : is60HzZone
14048 *
14049 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14050 *
14051 * PARAMETERS : None
14052 *
14053 * RETURN : True if in 60Hz zone, False otherwise
14054 *==========================================================================*/
14055bool QCamera3HardwareInterface::is60HzZone()
14056{
14057 time_t t = time(NULL);
14058 struct tm lt;
14059
14060 struct tm* r = localtime_r(&t, &lt);
14061
14062 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14063 return true;
14064 else
14065 return false;
14066}
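
// Worked example of the heuristic above (UTC offset from localtime_r() used as a
// coarse region guess):
//
//     UTC-05:00 (e.g. US Eastern)  -> gmtoff = -18000 <= -7200    -> true  (60Hz)
//     UTC+01:00 (e.g. Central EU)  -> -7200 < 3600 < 28800        -> false (50Hz)
//     UTC+09:00 (e.g. Japan)       -> gmtoff = 32400 >= 28800     -> true  (60Hz)
//
// If localtime_r() fails, the function conservatively reports a 60Hz zone.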
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014067
14068/*===========================================================================
14069 * FUNCTION : adjustBlackLevelForCFA
14070 *
14071 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14072 * of bayer CFA (Color Filter Array).
14073 *
14074 * PARAMETERS : @input: black level pattern in the order of RGGB
14075 * @output: black level pattern in the order of CFA
14076 * @color_arrangement: CFA color arrangement
14077 *
14078 * RETURN : None
14079 *==========================================================================*/
14080template<typename T>
14081void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14082 T input[BLACK_LEVEL_PATTERN_CNT],
14083 T output[BLACK_LEVEL_PATTERN_CNT],
14084 cam_color_filter_arrangement_t color_arrangement)
14085{
14086 switch (color_arrangement) {
14087 case CAM_FILTER_ARRANGEMENT_GRBG:
14088 output[0] = input[1];
14089 output[1] = input[0];
14090 output[2] = input[3];
14091 output[3] = input[2];
14092 break;
14093 case CAM_FILTER_ARRANGEMENT_GBRG:
14094 output[0] = input[2];
14095 output[1] = input[3];
14096 output[2] = input[0];
14097 output[3] = input[1];
14098 break;
14099 case CAM_FILTER_ARRANGEMENT_BGGR:
14100 output[0] = input[3];
14101 output[1] = input[2];
14102 output[2] = input[1];
14103 output[3] = input[0];
14104 break;
14105 case CAM_FILTER_ARRANGEMENT_RGGB:
14106 output[0] = input[0];
14107 output[1] = input[1];
14108 output[2] = input[2];
14109 output[3] = input[3];
14110 break;
14111 default:
14112 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14113 break;
14114 }
14115}
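
// Worked example for the remapping above: with an RGGB-ordered input of
// {R, Gr, Gb, B} = {64, 65, 66, 67} and a GRBG sensor, the output becomes
// {Gr, R, B, Gb} = {65, 64, 67, 66}, i.e. each channel is moved to the slot that
// matches the sensor's CFA readout order.
//
//     uint16_t in[BLACK_LEVEL_PATTERN_CNT]  = {64, 65, 66, 67};
//     uint16_t out[BLACK_LEVEL_PATTERN_CNT] = {};
//     adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
//     // out == {65, 64, 67, 66}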
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014116
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014117void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14118 CameraMetadata &resultMetadata,
14119 std::shared_ptr<metadata_buffer_t> settings)
14120{
14121 if (settings == nullptr) {
14122 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14123 return;
14124 }
14125
14126 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14127 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14128 }
14129
14130 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14131 String8 str((const char *)gps_methods);
14132 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14133 }
14134
14135 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14136 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14137 }
14138
14139 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14140 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14141 }
14142
14143 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14144 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14145 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14146 }
14147
14148 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14149 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14150 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14151 }
14152
14153 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14154 int32_t fwk_thumb_size[2];
14155 fwk_thumb_size[0] = thumb_size->width;
14156 fwk_thumb_size[1] = thumb_size->height;
14157 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14158 }
14159
14160 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14161 uint8_t fwk_intent = intent[0];
14162 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14163 }
14164}
14165
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014166bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14167 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14168 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014169{
14170 if (hdrPlusRequest == nullptr) return false;
14171
14172 // Check noise reduction mode is high quality.
14173 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14174 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14175 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014176 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14177 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014178 return false;
14179 }
14180
14181 // Check edge mode is high quality.
14182 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14183 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14184 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14185 return false;
14186 }
14187
14188 if (request.num_output_buffers != 1 ||
14189 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14190 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014191 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14192 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14193                     request.output_buffers[i].stream->width,
14194                     request.output_buffers[i].stream->height,
14195                     request.output_buffers[i].stream->format);
14196 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014197 return false;
14198 }
14199
14200 // Get a YUV buffer from pic channel.
14201 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14202 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14203 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14204 if (res != OK) {
14205 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14206 __FUNCTION__, strerror(-res), res);
14207 return false;
14208 }
14209
14210 pbcamera::StreamBuffer buffer;
14211 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014212 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014213 buffer.data = yuvBuffer->buffer;
14214 buffer.dataSize = yuvBuffer->frame_len;
14215
14216 pbcamera::CaptureRequest pbRequest;
14217 pbRequest.id = request.frame_number;
14218 pbRequest.outputBuffers.push_back(buffer);
14219
14220 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014221 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014222 if (res != OK) {
14223 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14224 strerror(-res), res);
14225 return false;
14226 }
14227
14228 hdrPlusRequest->yuvBuffer = yuvBuffer;
14229 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14230
14231 return true;
14232}
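
// Illustrative summary of the gating above: a capture request is only routed to
// HDR+ when the app asks for high-quality noise reduction and edge enhancement and
// the request produces exactly one JPEG (BLOB) output. A sketch of qualifying
// request settings, where "settings" is a hypothetical CameraMetadata object:
//
//     uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
//     uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
//     settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
//     settings.update(ANDROID_EDGE_MODE, &edge, 1);
//     // request.num_output_buffers == 1 and the output stream format is HAL_PIXEL_FORMAT_BLOB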
14233
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014234status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14235 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14236 return OK;
14237 }
14238
14239 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14240 if (res != OK) {
14241 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14242 strerror(-res), res);
14243 return res;
14244 }
14245 gHdrPlusClientOpening = true;
14246
14247 return OK;
14248}
14249
Chien-Yu Chenee335912017-02-09 17:53:20 -080014250status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14251{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014252 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014253
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014254 // Check if gHdrPlusClient is opened or being opened.
14255 if (gHdrPlusClient == nullptr) {
14256 if (gHdrPlusClientOpening) {
14257 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14258 return OK;
14259 }
14260
14261 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014262 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014263 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14264 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014265 return res;
14266 }
14267
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014268 // When opening HDR+ client completes, HDR+ mode will be enabled.
14269 return OK;
14270
Chien-Yu Chenee335912017-02-09 17:53:20 -080014271 }
14272
14273 // Configure stream for HDR+.
14274 res = configureHdrPlusStreamsLocked();
14275 if (res != OK) {
14276 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014277 return res;
14278 }
14279
14280 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14281 res = gHdrPlusClient->setZslHdrPlusMode(true);
14282 if (res != OK) {
14283 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014284 return res;
14285 }
14286
14287 mHdrPlusModeEnabled = true;
14288 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14289
14290 return OK;
14291}
14292
14293void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14294{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014295 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014296 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014297 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14298 if (res != OK) {
14299 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14300 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014301 }
14302
14303 mHdrPlusModeEnabled = false;
14304 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14305}
14306
14307status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014308{
14309 pbcamera::InputConfiguration inputConfig;
14310 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14311 status_t res = OK;
14312
14313 // Configure HDR+ client streams.
14314 // Get input config.
14315 if (mHdrPlusRawSrcChannel) {
14316 // HDR+ input buffers will be provided by HAL.
14317 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14318 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14319 if (res != OK) {
14320 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14321 __FUNCTION__, strerror(-res), res);
14322 return res;
14323 }
14324
14325 inputConfig.isSensorInput = false;
14326 } else {
14327 // Sensor MIPI will send data to Easel.
14328 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014329 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014330 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14331 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14332 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14333 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14334 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14335 if (mSensorModeInfo.num_raw_bits != 10) {
14336 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14337 mSensorModeInfo.num_raw_bits);
14338 return BAD_VALUE;
14339 }
14340
14341 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014342 }
14343
14344 // Get output configurations.
14345 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014346 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014347
14348 // Easel may need to output YUV output buffers if mPictureChannel was created.
14349 pbcamera::StreamConfiguration yuvOutputConfig;
14350 if (mPictureChannel != nullptr) {
14351 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14352 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14353 if (res != OK) {
14354 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14355 __FUNCTION__, strerror(-res), res);
14356
14357 return res;
14358 }
14359
14360 outputStreamConfigs.push_back(yuvOutputConfig);
14361 }
14362
14363 // TODO: consider other channels for YUV output buffers.
14364
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014365 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014366 if (res != OK) {
14367 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14368 strerror(-res), res);
14369 return res;
14370 }
14371
14372 return OK;
14373}
14374
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014375void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14376 if (client == nullptr) {
14377 ALOGE("%s: Opened client is null.", __FUNCTION__);
14378 return;
14379 }
14380
14381 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14382
14383 Mutex::Autolock l(gHdrPlusClientLock);
14384 gHdrPlusClient = std::move(client);
14385 gHdrPlusClientOpening = false;
14386
14387 // Set static metadata.
14388 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14389 if (res != OK) {
14390 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14391 __FUNCTION__, strerror(-res), res);
14392 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14393 gHdrPlusClient = nullptr;
14394 return;
14395 }
14396
14397 // Enable HDR+ mode.
14398 res = enableHdrPlusModeLocked();
14399 if (res != OK) {
14400 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14401 }
14402}
14403
14404void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14405 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14406 Mutex::Autolock l(gHdrPlusClientLock);
14407 gHdrPlusClientOpening = false;
14408}
14409
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014410void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14411 const camera_metadata_t &resultMetadata) {
14412 if (result != nullptr) {
14413 if (result->outputBuffers.size() != 1) {
14414 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14415 result->outputBuffers.size());
14416 return;
14417 }
14418
14419 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14420 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14421 result->outputBuffers[0].streamId);
14422 return;
14423 }
14424
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014425 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014426 HdrPlusPendingRequest pendingRequest;
14427 {
14428 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14429 auto req = mHdrPlusPendingRequests.find(result->requestId);
14430 pendingRequest = req->second;
14431 }
14432
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014433 // Update the result metadata with the settings of the HDR+ still capture request because
14434 // the result metadata belongs to a ZSL buffer.
14435 CameraMetadata metadata;
14436 metadata = &resultMetadata;
14437 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14438 camera_metadata_t* updatedResultMetadata = metadata.release();
14439
14440 QCamera3PicChannel *picChannel =
14441 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14442
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014443 // Check if dumping HDR+ YUV output is enabled.
14444 char prop[PROPERTY_VALUE_MAX];
14445 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14446 bool dumpYuvOutput = atoi(prop);
14447
14448 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014449 // Dump yuv buffer to a ppm file.
14450 pbcamera::StreamConfiguration outputConfig;
14451 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14452 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14453 if (rc == OK) {
14454 char buf[FILENAME_MAX] = {};
14455 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14456 result->requestId, result->outputBuffers[0].streamId,
14457 outputConfig.image.width, outputConfig.image.height);
14458
14459 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14460 } else {
14461 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14462 __FUNCTION__, strerror(-rc), rc);
14463 }
14464 }
14465
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014466 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14467 auto halMetadata = std::make_shared<metadata_buffer_t>();
14468 clear_metadata_buffer(halMetadata.get());
14469
14470 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14471 // encoding.
14472 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14473 halStreamId, /*minFrameDuration*/0);
14474 if (res == OK) {
14475 // Return the buffer to pic channel for encoding.
14476 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14477 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14478 halMetadata);
14479 } else {
14480 // Return the buffer without encoding.
14481 // TODO: This should not happen but we may want to report an error buffer to camera
14482 // service.
14483 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14484 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14485 strerror(-res), res);
14486 }
14487
14488 // Send HDR+ metadata to framework.
14489 {
14490 pthread_mutex_lock(&mMutex);
14491
14492 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14493 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14494 pthread_mutex_unlock(&mMutex);
14495 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014496
14497 // Remove the HDR+ pending request.
14498 {
14499 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14500 auto req = mHdrPlusPendingRequests.find(result->requestId);
14501 mHdrPlusPendingRequests.erase(req);
14502 }
14503 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014504}
14505
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014506void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14507 // TODO: Handle HDR+ capture failures and send the failure to framework.
14508 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14509 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14510
14511 // Return the buffer to pic channel.
14512 QCamera3PicChannel *picChannel =
14513 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14514 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14515
14516 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014517}
14518
Thierry Strudel3d639192016-09-09 11:52:26 -070014519}; //end namespace qcamera