/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per-configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

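// Illustrative sketch (hypothetical helper, not part of the original HAL): the indices
// above describe how one face's landmarks are packed into the flat int32_t array that
// backs ANDROID_STATISTICS_FACE_LANDMARKS. The helper below shows that packing under
// that assumption; the HAL's actual face-event conversion code may differ in detail.
static inline void packFaceLandmarksExample(int32_t leftEyeX, int32_t leftEyeY,
        int32_t rightEyeX, int32_t rightEyeY, int32_t mouthX, int32_t mouthY,
        int32_t landmarks[TOTAL_LANDMARK_INDICES]) {
    landmarks[LEFT_EYE_X]  = leftEyeX;
    landmarks[LEFT_EYE_Y]  = leftEyeY;
    landmarks[RIGHT_EYE_X] = rightEyeX;
    landmarks[RIGHT_EYE_Y] = rightEyeY;
    landmarks[MOUTH_X]     = mouthX;
    landmarks[MOUTH_Y]     = mouthY;
}
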
// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


149const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
150 {"On", CAM_CDS_MODE_ON},
151 {"Off", CAM_CDS_MODE_OFF},
152 {"Auto",CAM_CDS_MODE_AUTO}
153};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700154const QCamera3HardwareInterface::QCameraMap<
155 camera_metadata_enum_android_video_hdr_mode_t,
156 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
157 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
158 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
159};
160
Thierry Strudel54dc9782017-02-15 12:12:10 -0800161const QCamera3HardwareInterface::QCameraMap<
162 camera_metadata_enum_android_binning_correction_mode_t,
163 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
164 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
165 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
166};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700167
168const QCamera3HardwareInterface::QCameraMap<
169 camera_metadata_enum_android_ir_mode_t,
170 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
171 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
172 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
173 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
174};
Thierry Strudel3d639192016-09-09 11:52:26 -0700175
176const QCamera3HardwareInterface::QCameraMap<
177 camera_metadata_enum_android_control_effect_mode_t,
178 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
179 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
180 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
181 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
182 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
183 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
184 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
185 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
186 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
187 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
188};
189
190const QCamera3HardwareInterface::QCameraMap<
191 camera_metadata_enum_android_control_awb_mode_t,
192 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
193 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
194 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
195 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
196 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
197 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
198 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
199 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
200 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
201 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
202};
203
204const QCamera3HardwareInterface::QCameraMap<
205 camera_metadata_enum_android_control_scene_mode_t,
206 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
207 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
208 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
209 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
210 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
211 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
212 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
213 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
214 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
215 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
216 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
217 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
218 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
219 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
220 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
221 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800222 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
223 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700224};
225
226const QCamera3HardwareInterface::QCameraMap<
227 camera_metadata_enum_android_control_af_mode_t,
228 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
229 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
230 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
231 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
232 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
233 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
234 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
235 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
236};
237
238const QCamera3HardwareInterface::QCameraMap<
239 camera_metadata_enum_android_color_correction_aberration_mode_t,
240 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
241 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
242 CAM_COLOR_CORRECTION_ABERRATION_OFF },
243 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
244 CAM_COLOR_CORRECTION_ABERRATION_FAST },
245 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
246 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
247};
248
249const QCamera3HardwareInterface::QCameraMap<
250 camera_metadata_enum_android_control_ae_antibanding_mode_t,
251 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
252 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
253 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
254 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
255 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
256};
257
258const QCamera3HardwareInterface::QCameraMap<
259 camera_metadata_enum_android_control_ae_mode_t,
260 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
261 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
262 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
263 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
264 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
265 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
266};
267
268const QCamera3HardwareInterface::QCameraMap<
269 camera_metadata_enum_android_flash_mode_t,
270 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
271 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
272 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
273 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
274};
275
276const QCamera3HardwareInterface::QCameraMap<
277 camera_metadata_enum_android_statistics_face_detect_mode_t,
278 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
279 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
280 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
281 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
282};
283
284const QCamera3HardwareInterface::QCameraMap<
285 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
286 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
287 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
288 CAM_FOCUS_UNCALIBRATED },
289 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
290 CAM_FOCUS_APPROXIMATE },
291 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
292 CAM_FOCUS_CALIBRATED }
293};
294
295const QCamera3HardwareInterface::QCameraMap<
296 camera_metadata_enum_android_lens_state_t,
297 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
298 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
299 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
300};
301
302const int32_t available_thumbnail_sizes[] = {0, 0,
303 176, 144,
304 240, 144,
305 256, 144,
306 240, 160,
307 256, 154,
308 240, 240,
309 320, 240};
310
311const QCamera3HardwareInterface::QCameraMap<
312 camera_metadata_enum_android_sensor_test_pattern_mode_t,
313 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
314 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
315 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
316 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
317 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
318 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
319 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
320};
321
/* Some Android enums are not listed because there is no mapping for all the options.
 * The order in this list is also important: when mapping from HAL to Android, the
 * lookup traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the first entry found is selected.
 */
327const QCamera3HardwareInterface::QCameraMap<
328 camera_metadata_enum_android_sensor_reference_illuminant1_t,
329 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
330 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
331 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
332 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
333 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
334 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
335 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
336 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
337 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
338 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
339 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
340 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
341 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
342 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
343 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
344 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
346};
347
348const QCamera3HardwareInterface::QCameraMap<
349 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
350 { 60, CAM_HFR_MODE_60FPS},
351 { 90, CAM_HFR_MODE_90FPS},
352 { 120, CAM_HFR_MODE_120FPS},
353 { 150, CAM_HFR_MODE_150FPS},
354 { 180, CAM_HFR_MODE_180FPS},
355 { 210, CAM_HFR_MODE_210FPS},
356 { 240, CAM_HFR_MODE_240FPS},
357 { 480, CAM_HFR_MODE_480FPS},
358};
359
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700360const QCamera3HardwareInterface::QCameraMap<
361 qcamera3_ext_instant_aec_mode_t,
362 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
363 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
364 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
365 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
366};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800367
368const QCamera3HardwareInterface::QCameraMap<
369 qcamera3_ext_exposure_meter_mode_t,
370 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
371 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
372 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
373 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
374 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
375 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
376 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
377 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
378};
379
380const QCamera3HardwareInterface::QCameraMap<
381 qcamera3_ext_iso_mode_t,
382 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
383 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
384 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
385 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
386 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
387 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
388 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
389 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
390 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
391};
392
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

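// Usage note: callers in this file bracket Easel state transitions with the helper
// above, e.g. logEaselEvent("EASEL_STARTUP_LATENCY", "Resume") in openCamera() below;
// these timestamps are logged only when gEaselProfilingEnabled is set.
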
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks used to notify the framework
 *
 * RETURN     : none
 *==========================================================================*/
433QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
434 const camera_module_callbacks_t *callbacks)
435 : mCameraId(cameraId),
436 mCameraHandle(NULL),
437 mCameraInitialized(false),
438 mCallbackOps(NULL),
439 mMetadataChannel(NULL),
440 mPictureChannel(NULL),
441 mRawChannel(NULL),
442 mSupportChannel(NULL),
443 mAnalysisChannel(NULL),
444 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700445 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700446 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800447 mDepthChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800448 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700449 mChannelHandle(0),
450 mFirstConfiguration(true),
451 mFlush(false),
452 mFlushPerf(false),
453 mParamHeap(NULL),
454 mParameters(NULL),
455 mPrevParameters(NULL),
456 m_bIsVideo(false),
457 m_bIs4KVideo(false),
458 m_bEisSupportedSize(false),
459 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800460 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700461 m_MobicatMask(0),
462 mMinProcessedFrameDuration(0),
463 mMinJpegFrameDuration(0),
464 mMinRawFrameDuration(0),
465 mMetaFrameCount(0U),
466 mUpdateDebugLevel(false),
467 mCallbacks(callbacks),
468 mCaptureIntent(0),
469 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700470 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800471 /* DevCamDebug metadata internal m control*/
472 mDevCamDebugMetaEnable(0),
473 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700474 mBatchSize(0),
475 mToBeQueuedVidBufs(0),
476 mHFRVideoFps(DEFAULT_VIDEO_FPS),
477 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800478 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800479 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700480 mFirstFrameNumberInBatch(0),
481 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800482 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700483 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
484 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000485 mPDSupported(false),
486 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700487 mInstantAEC(false),
488 mResetInstantAEC(false),
489 mInstantAECSettledFrameNumber(0),
490 mAecSkipDisplayFrameBound(0),
491 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800492 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700493 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700494 mLastCustIntentFrmNum(-1),
495 mState(CLOSED),
496 mIsDeviceLinked(false),
497 mIsMainCamera(true),
498 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700499 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800500 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800501 mHdrPlusModeEnabled(false),
502 mIsApInputUsedForHdrPlus(false),
503 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800504 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700505{
506 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700507 mCommon.init(gCamCapability[cameraId]);
508 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700509#ifndef USE_HAL_3_3
510 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
511#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700512 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700513#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700514 mCameraDevice.common.close = close_camera_device;
515 mCameraDevice.ops = &mCameraOps;
516 mCameraDevice.priv = this;
517 gCamCapability[cameraId]->version = CAM_HAL_V3;
518 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
519 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
520 gCamCapability[cameraId]->min_num_pp_bufs = 3;
521
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800522 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700523
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800524 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700525 mPendingLiveRequest = 0;
526 mCurrentRequestId = -1;
527 pthread_mutex_init(&mMutex, NULL);
528
529 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
530 mDefaultMetadata[i] = NULL;
531
532 // Getting system props of different kinds
533 char prop[PROPERTY_VALUE_MAX];
534 memset(prop, 0, sizeof(prop));
535 property_get("persist.camera.raw.dump", prop, "0");
536 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800537 property_get("persist.camera.hal3.force.hdr", prop, "0");
538 mForceHdrSnapshot = atoi(prop);
539
Thierry Strudel3d639192016-09-09 11:52:26 -0700540 if (mEnableRawDump)
541 LOGD("Raw dump from Camera HAL enabled");
542
543 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
544 memset(mLdafCalib, 0, sizeof(mLdafCalib));
545
546 memset(prop, 0, sizeof(prop));
547 property_get("persist.camera.tnr.preview", prop, "0");
548 m_bTnrPreview = (uint8_t)atoi(prop);
549
550 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800551 property_get("persist.camera.swtnr.preview", prop, "1");
552 m_bSwTnrPreview = (uint8_t)atoi(prop);
553
554 memset(prop, 0, sizeof(prop));
Thierry Strudel3d639192016-09-09 11:52:26 -0700555 property_get("persist.camera.tnr.video", prop, "0");
556 m_bTnrVideo = (uint8_t)atoi(prop);
557
558 memset(prop, 0, sizeof(prop));
559 property_get("persist.camera.avtimer.debug", prop, "0");
560 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800561 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700562
Thierry Strudel54dc9782017-02-15 12:12:10 -0800563 memset(prop, 0, sizeof(prop));
564 property_get("persist.camera.cacmode.disable", prop, "0");
565 m_cacModeDisabled = (uint8_t)atoi(prop);
566
Thierry Strudel3d639192016-09-09 11:52:26 -0700567 //Load and read GPU library.
568 lib_surface_utils = NULL;
569 LINK_get_surface_pixel_alignment = NULL;
570 mSurfaceStridePadding = CAM_PAD_TO_32;
571 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
572 if (lib_surface_utils) {
573 *(void **)&LINK_get_surface_pixel_alignment =
574 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
575 if (LINK_get_surface_pixel_alignment) {
576 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
577 }
578 dlclose(lib_surface_utils);
579 }
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700580
Emilian Peev0f3c3162017-03-15 12:57:46 +0000581 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
582 mPDSupported = (0 <= mPDIndex) ? true : false;
583
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700584 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700585}
586
587/*===========================================================================
588 * FUNCTION : ~QCamera3HardwareInterface
589 *
590 * DESCRIPTION: destructor of QCamera3HardwareInterface
591 *
592 * PARAMETERS : none
593 *
594 * RETURN : none
595 *==========================================================================*/
596QCamera3HardwareInterface::~QCamera3HardwareInterface()
597{
598 LOGD("E");
599
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800600 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700601
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800602 // Disable power hint and enable the perf lock for close camera
603 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
604 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
605
606 // unlink of dualcam during close camera
607 if (mIsDeviceLinked) {
608 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
609 &m_pDualCamCmdPtr->bundle_info;
610 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
611 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
612 pthread_mutex_lock(&gCamLock);
613
614 if (mIsMainCamera == 1) {
615 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
616 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
617 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
618 // related session id should be session id of linked session
619 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
620 } else {
621 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
622 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
623 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
624 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
625 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800626 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800627 pthread_mutex_unlock(&gCamLock);
628
629 rc = mCameraHandle->ops->set_dual_cam_cmd(
630 mCameraHandle->camera_handle);
631 if (rc < 0) {
632 LOGE("Dualcam: Unlink failed, but still proceed to close");
633 }
634 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700635
636 /* We need to stop all streams before deleting any stream */
637 if (mRawDumpChannel) {
638 mRawDumpChannel->stop();
639 }
640
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700641 if (mHdrPlusRawSrcChannel) {
642 mHdrPlusRawSrcChannel->stop();
643 }
644
Thierry Strudel3d639192016-09-09 11:52:26 -0700645 // NOTE: 'camera3_stream_t *' objects are already freed at
646 // this stage by the framework
647 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
648 it != mStreamInfo.end(); it++) {
649 QCamera3ProcessingChannel *channel = (*it)->channel;
650 if (channel) {
651 channel->stop();
652 }
653 }
654 if (mSupportChannel)
655 mSupportChannel->stop();
656
657 if (mAnalysisChannel) {
658 mAnalysisChannel->stop();
659 }
660 if (mMetadataChannel) {
661 mMetadataChannel->stop();
662 }
663 if (mChannelHandle) {
664 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
665 mChannelHandle);
666 LOGD("stopping channel %d", mChannelHandle);
667 }
668
669 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
670 it != mStreamInfo.end(); it++) {
671 QCamera3ProcessingChannel *channel = (*it)->channel;
672 if (channel)
673 delete channel;
674 free (*it);
675 }
676 if (mSupportChannel) {
677 delete mSupportChannel;
678 mSupportChannel = NULL;
679 }
680
681 if (mAnalysisChannel) {
682 delete mAnalysisChannel;
683 mAnalysisChannel = NULL;
684 }
685 if (mRawDumpChannel) {
686 delete mRawDumpChannel;
687 mRawDumpChannel = NULL;
688 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700689 if (mHdrPlusRawSrcChannel) {
690 delete mHdrPlusRawSrcChannel;
691 mHdrPlusRawSrcChannel = NULL;
692 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700693 if (mDummyBatchChannel) {
694 delete mDummyBatchChannel;
695 mDummyBatchChannel = NULL;
696 }
697
698 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800699 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700700
701 if (mMetadataChannel) {
702 delete mMetadataChannel;
703 mMetadataChannel = NULL;
704 }
705
706 /* Clean up all channels */
707 if (mCameraInitialized) {
708 if(!mFirstConfiguration){
709 //send the last unconfigure
710 cam_stream_size_info_t stream_config_info;
711 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
712 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
713 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800714 m_bIs4KVideo ? 0 :
715 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700716 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700717 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
718 stream_config_info);
719 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
720 if (rc < 0) {
721 LOGE("set_parms failed for unconfigure");
722 }
723 }
724 deinitParameters();
725 }
726
727 if (mChannelHandle) {
728 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
729 mChannelHandle);
730 LOGH("deleting channel %d", mChannelHandle);
731 mChannelHandle = 0;
732 }
733
734 if (mState != CLOSED)
735 closeCamera();
736
737 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
738 req.mPendingBufferList.clear();
739 }
740 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700741 for (pendingRequestIterator i = mPendingRequestsList.begin();
742 i != mPendingRequestsList.end();) {
743 i = erasePendingRequest(i);
744 }
745 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
746 if (mDefaultMetadata[i])
747 free_camera_metadata(mDefaultMetadata[i]);
748
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800749 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700750
751 pthread_cond_destroy(&mRequestCond);
752
753 pthread_cond_destroy(&mBuffersCond);
754
755 pthread_mutex_destroy(&mMutex);
756 LOGD("X");
757}
758
759/*===========================================================================
760 * FUNCTION : erasePendingRequest
761 *
762 * DESCRIPTION: function to erase a desired pending request after freeing any
763 * allocated memory
764 *
765 * PARAMETERS :
766 * @i : iterator pointing to pending request to be erased
767 *
768 * RETURN : iterator pointing to the next request
769 *==========================================================================*/
770QCamera3HardwareInterface::pendingRequestIterator
771 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
772{
773 if (i->input_buffer != NULL) {
774 free(i->input_buffer);
775 i->input_buffer = NULL;
776 }
777 if (i->settings != NULL)
778 free_camera_metadata((camera_metadata_t*)i->settings);
779 return mPendingRequestsList.erase(i);
780}
781
782/*===========================================================================
783 * FUNCTION : camEvtHandle
784 *
785 * DESCRIPTION: Function registered to mm-camera-interface to handle events
786 *
787 * PARAMETERS :
788 * @camera_handle : interface layer camera handle
789 * @evt : ptr to event
790 * @user_data : user data ptr
791 *
792 * RETURN : none
793 *==========================================================================*/
794void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
795 mm_camera_event_t *evt,
796 void *user_data)
797{
798 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
799 if (obj && evt) {
800 switch(evt->server_event_type) {
801 case CAM_EVENT_TYPE_DAEMON_DIED:
802 pthread_mutex_lock(&obj->mMutex);
803 obj->mState = ERROR;
804 pthread_mutex_unlock(&obj->mMutex);
805 LOGE("Fatal, camera daemon died");
806 break;
807
808 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
809 LOGD("HAL got request pull from Daemon");
810 pthread_mutex_lock(&obj->mMutex);
811 obj->mWokenUpByDaemon = true;
812 obj->unblockRequestIfNecessary();
813 pthread_mutex_unlock(&obj->mMutex);
814 break;
815
816 default:
817 LOGW("Warning: Unhandled event %d",
818 evt->server_event_type);
819 break;
820 }
821 } else {
822 LOGE("NULL user_data/evt");
823 }
824}
825
826/*===========================================================================
827 * FUNCTION : openCamera
828 *
829 * DESCRIPTION: open camera
830 *
831 * PARAMETERS :
832 * @hw_device : double ptr for camera device struct
833 *
834 * RETURN : int32_t type of status
835 * NO_ERROR -- success
 *              non-zero failure code
837 *==========================================================================*/
838int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
839{
840 int rc = 0;
841 if (mState != CLOSED) {
842 *hw_device = NULL;
843 return PERMISSION_DENIED;
844 }
845
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800846 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700847 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
848 mCameraId);
849
850 rc = openCamera();
851 if (rc == 0) {
852 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800853 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700854 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800855 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700856
Thierry Strudel3d639192016-09-09 11:52:26 -0700857 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
858 mCameraId, rc);
859
860 if (rc == NO_ERROR) {
861 mState = OPENED;
862 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800863
Thierry Strudel3d639192016-09-09 11:52:26 -0700864 return rc;
865}
866
867/*===========================================================================
868 * FUNCTION : openCamera
869 *
870 * DESCRIPTION: open camera
871 *
872 * PARAMETERS : none
873 *
874 * RETURN : int32_t type of status
875 * NO_ERROR -- success
 *              non-zero failure code
877 *==========================================================================*/
878int QCamera3HardwareInterface::openCamera()
879{
880 int rc = 0;
881 char value[PROPERTY_VALUE_MAX];
882
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800883 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700884 if (mCameraHandle) {
885 LOGE("Failure: Camera already opened");
886 return ALREADY_EXISTS;
887 }
888
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700889 {
890 Mutex::Autolock l(gHdrPlusClientLock);
891 if (gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700892 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700893 rc = gEaselManagerClient.resume();
894 if (rc != 0) {
895 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
896 return rc;
897 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800898 }
899 }
900
Thierry Strudel3d639192016-09-09 11:52:26 -0700901 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
902 if (rc < 0) {
903 LOGE("Failed to reserve flash for camera id: %d",
904 mCameraId);
905 return UNKNOWN_ERROR;
906 }
907
908 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
909 if (rc) {
910 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
911 return rc;
912 }
913
914 if (!mCameraHandle) {
915 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
916 return -ENODEV;
917 }
918
919 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
920 camEvtHandle, (void *)this);
921
922 if (rc < 0) {
923 LOGE("Error, failed to register event callback");
924 /* Not closing camera here since it is already handled in destructor */
925 return FAILED_TRANSACTION;
926 }
927
928 mExifParams.debug_params =
929 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
930 if (mExifParams.debug_params) {
931 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
932 } else {
933 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
934 return NO_MEMORY;
935 }
936 mFirstConfiguration = true;
937
    // Notify display HAL that a camera session is active.
    // Avoid making this call during bootup: camera service may open/close cameras
    // at boot time while it initializes, and display service internally waits for
    // camera service to finish initializing before servicing this display API, which
    // would deadlock. Boot-time camera open/close calls are made only to fetch
    // capabilities, so this display bandwidth optimization is not needed there.
    // Use the "service.bootanim.exit" property to detect boot status.
945 property_get("service.bootanim.exit", value, "0");
946 if (atoi(value) == 1) {
947 pthread_mutex_lock(&gCamLock);
948 if (gNumCameraSessions++ == 0) {
949 setCameraLaunchStatus(true);
950 }
951 pthread_mutex_unlock(&gCamLock);
952 }
953
954 //fill the session id needed while linking dual cam
955 pthread_mutex_lock(&gCamLock);
956 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
957 &sessionId[mCameraId]);
958 pthread_mutex_unlock(&gCamLock);
959
960 if (rc < 0) {
961 LOGE("Error, failed to get sessiion id");
962 return UNKNOWN_ERROR;
963 } else {
964 //Allocate related cam sync buffer
965 //this is needed for the payload that goes along with bundling cmd for related
966 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700967 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
968 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700969 if(rc != OK) {
970 rc = NO_MEMORY;
971 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
972 return NO_MEMORY;
973 }
974
975 //Map memory for related cam sync buffer
976 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700977 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
978 m_pDualCamCmdHeap->getFd(0),
979 sizeof(cam_dual_camera_cmd_info_t),
980 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700981 if(rc < 0) {
982 LOGE("Dualcam: failed to map Related cam sync buffer");
983 rc = FAILED_TRANSACTION;
984 return NO_MEMORY;
985 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700986 m_pDualCamCmdPtr =
987 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700988 }
989
990 LOGH("mCameraId=%d",mCameraId);
991
992 return NO_ERROR;
993}
994
995/*===========================================================================
996 * FUNCTION : closeCamera
997 *
998 * DESCRIPTION: close camera
999 *
1000 * PARAMETERS : none
1001 *
1002 * RETURN : int32_t type of status
1003 * NO_ERROR -- success
 *              non-zero failure code
1005 *==========================================================================*/
1006int QCamera3HardwareInterface::closeCamera()
1007{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001008 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001009 int rc = NO_ERROR;
1010 char value[PROPERTY_VALUE_MAX];
1011
1012 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1013 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001014
1015 // unmap memory for related cam sync buffer
1016 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001017 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001018 if (NULL != m_pDualCamCmdHeap) {
1019 m_pDualCamCmdHeap->deallocate();
1020 delete m_pDualCamCmdHeap;
1021 m_pDualCamCmdHeap = NULL;
1022 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001023 }
1024
Thierry Strudel3d639192016-09-09 11:52:26 -07001025 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1026 mCameraHandle = NULL;
1027
1028 //reset session id to some invalid id
1029 pthread_mutex_lock(&gCamLock);
1030 sessionId[mCameraId] = 0xDEADBEEF;
1031 pthread_mutex_unlock(&gCamLock);
1032
1033 //Notify display HAL that there is no active camera session
1034 //but avoid calling the same during bootup. Refer to openCamera
1035 //for more details.
1036 property_get("service.bootanim.exit", value, "0");
1037 if (atoi(value) == 1) {
1038 pthread_mutex_lock(&gCamLock);
1039 if (--gNumCameraSessions == 0) {
1040 setCameraLaunchStatus(false);
1041 }
1042 pthread_mutex_unlock(&gCamLock);
1043 }
1044
Thierry Strudel3d639192016-09-09 11:52:26 -07001045 if (mExifParams.debug_params) {
1046 free(mExifParams.debug_params);
1047 mExifParams.debug_params = NULL;
1048 }
1049 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1050 LOGW("Failed to release flash for camera id: %d",
1051 mCameraId);
1052 }
1053 mState = CLOSED;
1054 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1055 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001056
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001057 {
1058 Mutex::Autolock l(gHdrPlusClientLock);
1059 if (gHdrPlusClient != nullptr) {
1060 // Disable HDR+ mode.
1061 disableHdrPlusModeLocked();
1062 // Disconnect Easel if it's connected.
1063 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
1064 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001065 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001066
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001067 if (EaselManagerClientOpened) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001068 rc = gEaselManagerClient.stopMipi(mCameraId);
1069 if (rc != 0) {
1070 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1071 }
1072
1073 rc = gEaselManagerClient.suspend();
1074 if (rc != 0) {
1075 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1076 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001077 }
1078 }
1079
Thierry Strudel3d639192016-09-09 11:52:26 -07001080 return rc;
1081}
1082
1083/*===========================================================================
1084 * FUNCTION : initialize
1085 *
 * DESCRIPTION: Initialize the framework callback functions
1087 *
1088 * PARAMETERS :
1089 * @callback_ops : callback function to frameworks
1090 *
1091 * RETURN :
1092 *
1093 *==========================================================================*/
1094int QCamera3HardwareInterface::initialize(
1095 const struct camera3_callback_ops *callback_ops)
1096{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001097 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001098 int rc;
1099
1100 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1101 pthread_mutex_lock(&mMutex);
1102
1103 // Validate current state
1104 switch (mState) {
1105 case OPENED:
1106 /* valid state */
1107 break;
1108 default:
1109 LOGE("Invalid state %d", mState);
1110 rc = -ENODEV;
1111 goto err1;
1112 }
1113
1114 rc = initParameters();
1115 if (rc < 0) {
1116 LOGE("initParamters failed %d", rc);
1117 goto err1;
1118 }
1119 mCallbackOps = callback_ops;
1120
1121 mChannelHandle = mCameraHandle->ops->add_channel(
1122 mCameraHandle->camera_handle, NULL, NULL, this);
1123 if (mChannelHandle == 0) {
1124 LOGE("add_channel failed");
1125 rc = -ENOMEM;
1126 pthread_mutex_unlock(&mMutex);
1127 return rc;
1128 }
1129
1130 pthread_mutex_unlock(&mMutex);
1131 mCameraInitialized = true;
1132 mState = INITIALIZED;
1133 LOGI("X");
1134 return 0;
1135
1136err1:
1137 pthread_mutex_unlock(&mMutex);
1138 return rc;
1139}
1140
1141/*===========================================================================
1142 * FUNCTION : validateStreamDimensions
1143 *
 * DESCRIPTION: Check that the requested stream configuration matches what is advertised
1145 *
1146 * PARAMETERS :
1147 * @stream_list : streams to be configured
1148 *
1149 * RETURN :
1150 *
1151 *==========================================================================*/
1152int QCamera3HardwareInterface::validateStreamDimensions(
1153 camera3_stream_configuration_t *streamList)
1154{
1155 int rc = NO_ERROR;
1156 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001157 uint32_t depthWidth = 0;
1158 uint32_t depthHeight = 0;
1159 if (mPDSupported) {
1160 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1161 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1162 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001163
1164 camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists.
     */
1168 for (size_t i = 0; i< streamList->num_streams; i++) {
1169 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1170 if (inputStream != NULL) {
1171 LOGE("Error, Multiple input streams requested");
1172 return -EINVAL;
1173 }
1174 inputStream = streamList->streams[i];
1175 }
1176 }
1177 /*
1178 * Loop through all streams requested in configuration
1179 * Check if unsupported sizes have been requested on any of them
1180 */
1181 for (size_t j = 0; j < streamList->num_streams; j++) {
1182 bool sizeFound = false;
1183 camera3_stream_t *newStream = streamList->streams[j];
1184
1185 uint32_t rotatedHeight = newStream->height;
1186 uint32_t rotatedWidth = newStream->width;
1187 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1188 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1189 rotatedHeight = newStream->width;
1190 rotatedWidth = newStream->height;
1191 }
1192
        /*
         * Sizes are different for each stream format; check against the
         * appropriate table.
         */
1197 switch (newStream->format) {
1198 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1199 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1200 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001201 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1202 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1203 mPDSupported) {
1204 if ((depthWidth == newStream->width) &&
1205 (depthHeight == newStream->height)) {
1206 sizeFound = true;
1207 }
1208 break;
1209 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001210 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1211 for (size_t i = 0; i < count; i++) {
1212 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1213 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1214 sizeFound = true;
1215 break;
1216 }
1217 }
1218 break;
1219 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001220 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1221 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001222 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001223 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001224 if ((depthSamplesCount == newStream->width) &&
1225 (1 == newStream->height)) {
1226 sizeFound = true;
1227 }
1228 break;
1229 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001230 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1231 /* Verify set size against generated sizes table */
1232 for (size_t i = 0; i < count; i++) {
1233 if (((int32_t)rotatedWidth ==
1234 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1235 ((int32_t)rotatedHeight ==
1236 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1237 sizeFound = true;
1238 break;
1239 }
1240 }
1241 break;
1242 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1243 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1244 default:
1245 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1246 || newStream->stream_type == CAMERA3_STREAM_INPUT
1247 || IS_USAGE_ZSL(newStream->usage)) {
1248 if (((int32_t)rotatedWidth ==
1249 gCamCapability[mCameraId]->active_array_size.width) &&
1250 ((int32_t)rotatedHeight ==
1251 gCamCapability[mCameraId]->active_array_size.height)) {
1252 sizeFound = true;
1253 break;
1254 }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size, but it
                 * is not clear from the spec whether the framework will always
                 * follow that. We also have logic to override to full array size,
                 * so keep the check lenient for now.
                 */
1261 }
1262 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1263 MAX_SIZES_CNT);
1264 for (size_t i = 0; i < count; i++) {
1265 if (((int32_t)rotatedWidth ==
1266 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1267 ((int32_t)rotatedHeight ==
1268 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1269 sizeFound = true;
1270 break;
1271 }
1272 }
1273 break;
1274 } /* End of switch(newStream->format) */
1275
1276 /* We error out even if a single stream has unsupported size set */
1277 if (!sizeFound) {
1278 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1279 rotatedWidth, rotatedHeight, newStream->format,
1280 gCamCapability[mCameraId]->active_array_size.width,
1281 gCamCapability[mCameraId]->active_array_size.height);
1282 rc = -EINVAL;
1283 break;
1284 }
1285 } /* End of for each stream */
1286 return rc;
1287}
1288
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001289/*===========================================================================
1290 * FUNCTION : validateUsageFlags
1291 *
1292 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1293 *
1294 * PARAMETERS :
1295 * @stream_list : streams to be configured
1296 *
1297 * RETURN :
1298 * NO_ERROR if the usage flags are supported
1299 * error code if usage flags are not supported
1300 *
1301 *==========================================================================*/
1302int QCamera3HardwareInterface::validateUsageFlags(
1303 const camera3_stream_configuration_t* streamList)
1304{
1305 for (size_t j = 0; j < streamList->num_streams; j++) {
1306 const camera3_stream_t *newStream = streamList->streams[j];
1307
1308 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1309 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1310 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1311 continue;
1312 }
1313
1314 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1315 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1316 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1317 bool forcePreviewUBWC = true;
1318 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1319 forcePreviewUBWC = false;
1320 }
1321 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1322 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1323 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1324 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1325 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1326 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1327
1328 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1329 // So color spaces will always match.
1330
1331 // Check whether underlying formats of shared streams match.
1332 if (isVideo && isPreview && videoFormat != previewFormat) {
1333 LOGE("Combined video and preview usage flag is not supported");
1334 return -EINVAL;
1335 }
1336 if (isPreview && isZSL && previewFormat != zslFormat) {
1337 LOGE("Combined preview and zsl usage flag is not supported");
1338 return -EINVAL;
1339 }
1340 if (isVideo && isZSL && videoFormat != zslFormat) {
1341 LOGE("Combined video and zsl usage flag is not supported");
1342 return -EINVAL;
1343 }
1344 }
1345 return NO_ERROR;
1346}
1347
1348/*===========================================================================
1349 * FUNCTION : validateUsageFlagsForEis
1350 *
1351 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1352 *
1353 * PARAMETERS :
1354 * @stream_list : streams to be configured
1355 *
1356 * RETURN :
1357 * NO_ERROR if the usage flags are supported
1358 * error code if usage flags are not supported
1359 *
1360 *==========================================================================*/
1361int QCamera3HardwareInterface::validateUsageFlagsForEis(
1362 const camera3_stream_configuration_t* streamList)
1363{
1364 for (size_t j = 0; j < streamList->num_streams; j++) {
1365 const camera3_stream_t *newStream = streamList->streams[j];
1366
1367 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1368 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1369
1370 // Because EIS is "hard-coded" for certain use cases, and the current
1371 // implementation doesn't support sharing preview and video on the same
1372 // stream, return failure if EIS is forced on.
1373 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1374 LOGE("Combined video and preview usage flag is not supported due to EIS");
1375 return -EINVAL;
1376 }
1377 }
1378 return NO_ERROR;
1379}
1380
Thierry Strudel3d639192016-09-09 11:52:26 -07001381/*==============================================================================
1382 * FUNCTION : isSupportChannelNeeded
1383 *
1384 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1385 *
1386 * PARAMETERS :
1387 * @stream_list : streams to be configured
1388 * @stream_config_info : the config info for streams to be configured
1389 *
1390 * RETURN : Boolean true/false decision
1391 *
1392 *==========================================================================*/
1393bool QCamera3HardwareInterface::isSupportChannelNeeded(
1394 camera3_stream_configuration_t *streamList,
1395 cam_stream_size_info_t stream_config_info)
1396{
1397 uint32_t i;
1398 bool pprocRequested = false;
1399 /* Check for conditions where PProc pipeline does not have any streams */
1400 for (i = 0; i < stream_config_info.num_streams; i++) {
1401 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1402 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1403 pprocRequested = true;
1404 break;
1405 }
1406 }
1407
1408 if (pprocRequested == false )
1409 return true;
1410
1411 /* Dummy stream needed if only raw or JPEG streams are present */
1412 for (i = 0; i < streamList->num_streams; i++) {
1413 switch(streamList->streams[i]->format) {
1414 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1415 case HAL_PIXEL_FORMAT_RAW10:
1416 case HAL_PIXEL_FORMAT_RAW16:
1417 case HAL_PIXEL_FORMAT_BLOB:
1418 break;
1419 default:
1420 return false;
1421 }
1422 }
1423 return true;
1424}
1425
1426/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001427 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001428 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001429 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001430 *
1431 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001432 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001433 *
1434 * RETURN : int32_t type of status
1435 * NO_ERROR -- success
1436 * non-zero failure code
1437 *
1438 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001439int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001440{
1441 int32_t rc = NO_ERROR;
1442
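// Find the largest width and height across all configured streams; this is
// sent down as CAM_INTF_PARM_MAX_DIMENSION so the backend can pick a sensor
// mode large enough to cover every stream before its info is queried below.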
1443 cam_dimension_t max_dim = {0, 0};
1444 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1445 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1446 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1447 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1448 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1449 }
1450
1451 clear_metadata_buffer(mParameters);
1452
1453 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1454 max_dim);
1455 if (rc != NO_ERROR) {
1456 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1457 return rc;
1458 }
1459
1460 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1461 if (rc != NO_ERROR) {
1462 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1463 return rc;
1464 }
1465
1466 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001467 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001468
1469 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1470 mParameters);
1471 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001473 return rc;
1474 }
1475
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001477 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1478 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1479 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1480 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1481 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001482
1483 return rc;
1484}
1485
1486/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001487 * FUNCTION : addToPPFeatureMask
1488 *
1489 * DESCRIPTION: add additional features to pp feature mask based on
1490 * stream type and usecase
1491 *
1492 * PARAMETERS :
1493 * @stream_format : stream type for feature mask
1494 * @stream_idx : stream idx within postprocess_mask list to change
1495 *
1496 * RETURN : None
1497 *
1498 *==========================================================================*/
1499void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1500 uint32_t stream_idx)
1501{
1502 char feature_mask_value[PROPERTY_VALUE_MAX];
1503 cam_feature_mask_t feature_mask;
1504 int args_converted;
1505 int property_len;
1506
1507 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001508#ifdef _LE_CAMERA_
1509 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1510 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1511 property_len = property_get("persist.camera.hal3.feature",
1512 feature_mask_value, swtnr_feature_mask_value);
1513#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001514 property_len = property_get("persist.camera.hal3.feature",
1515 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001516#endif
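/* The property value may be given as hex ("0x...") or decimal; e.g.
 * "adb shell setprop persist.camera.hal3.feature 0x0" (example value only)
 * leaves the mask empty so none of the extra features below are added. */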
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1518 (feature_mask_value[1] == 'x')) {
1519 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1520 } else {
1521 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1522 }
1523 if (1 != args_converted) {
1524 feature_mask = 0;
1525 LOGE("Wrong feature mask %s", feature_mask_value);
1526 return;
1527 }
1528
1529 switch (stream_format) {
1530 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1531 /* Add LLVD to pp feature mask only if video hint is enabled */
1532 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1533 mStreamConfigInfo.postprocess_mask[stream_idx]
1534 |= CAM_QTI_FEATURE_SW_TNR;
1535 LOGH("Added SW TNR to pp feature mask");
1536 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1537 mStreamConfigInfo.postprocess_mask[stream_idx]
1538 |= CAM_QCOM_FEATURE_LLVD;
1539 LOGH("Added LLVD SeeMore to pp feature mask");
1540 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001541 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1542 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1543 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1544 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001545 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1546 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1547 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1548 CAM_QTI_FEATURE_BINNING_CORRECTION;
1549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001550 break;
1551 }
1552 default:
1553 break;
1554 }
1555 LOGD("PP feature mask %llx",
1556 mStreamConfigInfo.postprocess_mask[stream_idx]);
1557}
1558
1559/*==============================================================================
1560 * FUNCTION : updateFpsInPreviewBuffer
1561 *
1562 * DESCRIPTION: update FPS information in preview buffer.
1563 *
1564 * PARAMETERS :
1565 * @metadata : pointer to metadata buffer
1566 * @frame_number: frame_number to look for in pending buffer list
1567 *
1568 * RETURN : None
1569 *
1570 *==========================================================================*/
1571void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1572 uint32_t frame_number)
1573{
1574 // Mark all pending buffers for this particular request
1575 // with corresponding framerate information
1576 for (List<PendingBuffersInRequest>::iterator req =
1577 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1578 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1579 for(List<PendingBufferInfo>::iterator j =
1580 req->mPendingBufferList.begin();
1581 j != req->mPendingBufferList.end(); j++) {
1582 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1583 if ((req->frame_number == frame_number) &&
1584 (channel->getStreamTypeMask() &
1585 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1586 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1587 CAM_INTF_PARM_FPS_RANGE, metadata) {
1588 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1589 struct private_handle_t *priv_handle =
1590 (struct private_handle_t *)(*(j->buffer));
1591 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1592 }
1593 }
1594 }
1595 }
1596}
1597
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001598/*==============================================================================
1599 * FUNCTION : updateTimeStampInPendingBuffers
1600 *
1601 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1602 * of a frame number
1603 *
1604 * PARAMETERS :
1605 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1606 * @timestamp : timestamp to be set
1607 *
1608 * RETURN : None
1609 *
1610 *==========================================================================*/
1611void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1612 uint32_t frameNumber, nsecs_t timestamp)
1613{
1614 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1615 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1616 if (req->frame_number != frameNumber)
1617 continue;
1618
1619 for (auto k = req->mPendingBufferList.begin();
1620 k != req->mPendingBufferList.end(); k++ ) {
1621 struct private_handle_t *priv_handle =
1622 (struct private_handle_t *) (*(k->buffer));
1623 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1624 }
1625 }
1626 return;
1627}
1628
Thierry Strudel3d639192016-09-09 11:52:26 -07001629/*===========================================================================
1630 * FUNCTION : configureStreams
1631 *
1632 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1633 * and output streams.
1634 *
1635 * PARAMETERS :
1636 * @stream_list : streams to be configured
1637 *
1638 * RETURN :
1639 *
1640 *==========================================================================*/
1641int QCamera3HardwareInterface::configureStreams(
1642 camera3_stream_configuration_t *streamList)
1643{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001644 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001645 int rc = 0;
1646
1647 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001648 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001650 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001651
1652 return rc;
1653}
1654
1655/*===========================================================================
1656 * FUNCTION : configureStreamsPerfLocked
1657 *
1658 * DESCRIPTION: configureStreams while perfLock is held.
1659 *
1660 * PARAMETERS :
1661 * @stream_list : streams to be configured
1662 *
1663 * RETURN : int32_t type of status
1664 * NO_ERROR -- success
1665 * non-zero failure code
1666 *==========================================================================*/
1667int QCamera3HardwareInterface::configureStreamsPerfLocked(
1668 camera3_stream_configuration_t *streamList)
1669{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001670 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001671 int rc = 0;
1672
1673 // Sanity check stream_list
1674 if (streamList == NULL) {
1675 LOGE("NULL stream configuration");
1676 return BAD_VALUE;
1677 }
1678 if (streamList->streams == NULL) {
1679 LOGE("NULL stream list");
1680 return BAD_VALUE;
1681 }
1682
1683 if (streamList->num_streams < 1) {
1684 LOGE("Bad number of streams requested: %d",
1685 streamList->num_streams);
1686 return BAD_VALUE;
1687 }
1688
1689 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1690 LOGE("Maximum number of streams %d exceeded: %d",
1691 MAX_NUM_STREAMS, streamList->num_streams);
1692 return BAD_VALUE;
1693 }
1694
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001695 rc = validateUsageFlags(streamList);
1696 if (rc != NO_ERROR) {
1697 return rc;
1698 }
1699
Thierry Strudel3d639192016-09-09 11:52:26 -07001700 mOpMode = streamList->operation_mode;
1701 LOGD("mOpMode: %d", mOpMode);
1702
1703 /* first invalidate all the streams in mStreamInfo;
1704 * if they appear again, they will be validated */
1705 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1706 it != mStreamInfo.end(); it++) {
1707 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1708 if (channel) {
1709 channel->stop();
1710 }
1711 (*it)->status = INVALID;
1712 }
1713
1714 if (mRawDumpChannel) {
1715 mRawDumpChannel->stop();
1716 delete mRawDumpChannel;
1717 mRawDumpChannel = NULL;
1718 }
1719
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001720 if (mHdrPlusRawSrcChannel) {
1721 mHdrPlusRawSrcChannel->stop();
1722 delete mHdrPlusRawSrcChannel;
1723 mHdrPlusRawSrcChannel = NULL;
1724 }
1725
Thierry Strudel3d639192016-09-09 11:52:26 -07001726 if (mSupportChannel)
1727 mSupportChannel->stop();
1728
1729 if (mAnalysisChannel) {
1730 mAnalysisChannel->stop();
1731 }
1732 if (mMetadataChannel) {
1733 /* If mStreamInfo is non-empty, a metadata stream was configured and must be stopped */
1734 mMetadataChannel->stop();
1735 }
1736 if (mChannelHandle) {
1737 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1738 mChannelHandle);
1739 LOGD("stopping channel %d", mChannelHandle);
1740 }
1741
1742 pthread_mutex_lock(&mMutex);
1743
1744 // Check state
1745 switch (mState) {
1746 case INITIALIZED:
1747 case CONFIGURED:
1748 case STARTED:
1749 /* valid state */
1750 break;
1751 default:
1752 LOGE("Invalid state %d", mState);
1753 pthread_mutex_unlock(&mMutex);
1754 return -ENODEV;
1755 }
1756
1757 /* Check whether we have video stream */
1758 m_bIs4KVideo = false;
1759 m_bIsVideo = false;
1760 m_bEisSupportedSize = false;
1761 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001762 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001763 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001764 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001765 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001766 uint32_t videoWidth = 0U;
1767 uint32_t videoHeight = 0U;
1768 size_t rawStreamCnt = 0;
1769 size_t stallStreamCnt = 0;
1770 size_t processedStreamCnt = 0;
1771 // Number of streams on ISP encoder path
1772 size_t numStreamsOnEncoder = 0;
1773 size_t numYuv888OnEncoder = 0;
1774 bool bYuv888OverrideJpeg = false;
1775 cam_dimension_t largeYuv888Size = {0, 0};
1776 cam_dimension_t maxViewfinderSize = {0, 0};
1777 bool bJpegExceeds4K = false;
1778 bool bJpegOnEncoder = false;
1779 bool bUseCommonFeatureMask = false;
1780 cam_feature_mask_t commonFeatureMask = 0;
1781 bool bSmallJpegSize = false;
1782 uint32_t width_ratio;
1783 uint32_t height_ratio;
1784 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1785 camera3_stream_t *inputStream = NULL;
1786 bool isJpeg = false;
1787 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001788 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001789 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001790
1791 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1792
1793 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001794 uint8_t eis_prop_set;
1795 uint32_t maxEisWidth = 0;
1796 uint32_t maxEisHeight = 0;
1797
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001798 // Initialize all instant AEC related variables
1799 mInstantAEC = false;
1800 mResetInstantAEC = false;
1801 mInstantAECSettledFrameNumber = 0;
1802 mAecSkipDisplayFrameBound = 0;
1803 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001804 mCurrFeatureState = 0;
1805 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001806
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1808
1809 size_t count = IS_TYPE_MAX;
1810 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1811 for (size_t i = 0; i < count; i++) {
1812 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001813 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1814 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001815 break;
1816 }
1817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001818
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001819 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001820 maxEisWidth = MAX_EIS_WIDTH;
1821 maxEisHeight = MAX_EIS_HEIGHT;
1822 }
1823
1824 /* EIS setprop control */
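// e.g. "adb shell setprop persist.camera.eis.enable 0" disables EIS on the
// next stream configuration; the default here is enabled ("1") when the
// sensor advertises an EIS-capable IS type.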
1825 char eis_prop[PROPERTY_VALUE_MAX];
1826 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001827 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001828 eis_prop_set = (uint8_t)atoi(eis_prop);
1829
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001830 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001831 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1832
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001833 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1834 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001835
Thierry Strudel3d639192016-09-09 11:52:26 -07001836 /* stream configurations */
1837 for (size_t i = 0; i < streamList->num_streams; i++) {
1838 camera3_stream_t *newStream = streamList->streams[i];
1839 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1840 "height = %d, rotation = %d, usage = 0x%x",
1841 i, newStream->stream_type, newStream->format,
1842 newStream->width, newStream->height, newStream->rotation,
1843 newStream->usage);
1844 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1845 newStream->stream_type == CAMERA3_STREAM_INPUT){
1846 isZsl = true;
1847 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001848 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1849 IS_USAGE_PREVIEW(newStream->usage)) {
1850 isPreview = true;
1851 }
1852
Thierry Strudel3d639192016-09-09 11:52:26 -07001853 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1854 inputStream = newStream;
1855 }
1856
Emilian Peev7650c122017-01-19 08:24:33 -08001857 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1858 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001859 isJpeg = true;
1860 jpegSize.width = newStream->width;
1861 jpegSize.height = newStream->height;
1862 if (newStream->width > VIDEO_4K_WIDTH ||
1863 newStream->height > VIDEO_4K_HEIGHT)
1864 bJpegExceeds4K = true;
1865 }
1866
1867 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1868 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1869 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001870 // In HAL3 we can have multiple different video streams.
1871 // The videoWidth and videoHeight variables below track the
1872 // dimensions of the largest of them.
1873 if (videoWidth < newStream->width ||
1874 videoHeight < newStream->height) {
1875 videoWidth = newStream->width;
1876 videoHeight = newStream->height;
1877 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001878 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1879 (VIDEO_4K_HEIGHT <= newStream->height)) {
1880 m_bIs4KVideo = true;
1881 }
1882 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1883 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001884
Thierry Strudel3d639192016-09-09 11:52:26 -07001885 }
1886 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1887 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1888 switch (newStream->format) {
1889 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001890 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1891 depthPresent = true;
1892 break;
1893 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 stallStreamCnt++;
1895 if (isOnEncoder(maxViewfinderSize, newStream->width,
1896 newStream->height)) {
1897 numStreamsOnEncoder++;
1898 bJpegOnEncoder = true;
1899 }
1900 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1901 newStream->width);
1902 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1903 newStream->height);;
1904 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1905 "FATAL: max_downscale_factor cannot be zero and so assert");
1906 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1907 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1908 LOGH("Setting small jpeg size flag to true");
1909 bSmallJpegSize = true;
1910 }
1911 break;
1912 case HAL_PIXEL_FORMAT_RAW10:
1913 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1914 case HAL_PIXEL_FORMAT_RAW16:
1915 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001916 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1917 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1918 pdStatCount++;
1919 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001920 break;
1921 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1922 processedStreamCnt++;
1923 if (isOnEncoder(maxViewfinderSize, newStream->width,
1924 newStream->height)) {
1925 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1926 !IS_USAGE_ZSL(newStream->usage)) {
1927 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1928 }
1929 numStreamsOnEncoder++;
1930 }
1931 break;
1932 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1933 processedStreamCnt++;
1934 if (isOnEncoder(maxViewfinderSize, newStream->width,
1935 newStream->height)) {
1936 // If Yuv888 size is not greater than 4K, set feature mask
1937 // to SUPERSET so that it support concurrent request on
1938 // YUV and JPEG.
1939 if (newStream->width <= VIDEO_4K_WIDTH &&
1940 newStream->height <= VIDEO_4K_HEIGHT) {
1941 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1942 }
1943 numStreamsOnEncoder++;
1944 numYuv888OnEncoder++;
1945 largeYuv888Size.width = newStream->width;
1946 largeYuv888Size.height = newStream->height;
1947 }
1948 break;
1949 default:
1950 processedStreamCnt++;
1951 if (isOnEncoder(maxViewfinderSize, newStream->width,
1952 newStream->height)) {
1953 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1954 numStreamsOnEncoder++;
1955 }
1956 break;
1957 }
1958
1959 }
1960 }
1961
1962 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1963 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1964 !m_bIsVideo) {
1965 m_bEisEnable = false;
1966 }
1967
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001968 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1969 pthread_mutex_unlock(&mMutex);
1970 return -EINVAL;
1971 }
1972
Thierry Strudel54dc9782017-02-15 12:12:10 -08001973 uint8_t forceEnableTnr = 0;
1974 char tnr_prop[PROPERTY_VALUE_MAX];
1975 memset(tnr_prop, 0, sizeof(tnr_prop));
1976 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1977 forceEnableTnr = (uint8_t)atoi(tnr_prop);
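// Debug aid: "adb shell setprop debug.camera.tnr.forceenable 1" turns TNR on
// regardless of the resolution/HFR checks below.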
1978
Thierry Strudel3d639192016-09-09 11:52:26 -07001979 /* Enable TNR only for 1080p/720p video outside constrained high-speed mode, or when force-enabled via property */
1980 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1981 ((videoWidth == 1920 && videoHeight == 1080) ||
1982 (videoWidth == 1280 && videoHeight == 720)) &&
1983 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1984 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001985 else if (forceEnableTnr)
1986 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001987
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001988 char videoHdrProp[PROPERTY_VALUE_MAX];
1989 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1990 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1991 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1992
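// Video HDR follows the persist.camera.hdr.video property: enabled only for
// video use cases outside constrained high-speed mode.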
1993 if (hdr_mode_prop == 1 && m_bIsVideo &&
1994 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1995 m_bVideoHdrEnabled = true;
1996 else
1997 m_bVideoHdrEnabled = false;
1998
1999
Thierry Strudel3d639192016-09-09 11:52:26 -07002000 /* Check if num_streams is sane */
2001 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2002 rawStreamCnt > MAX_RAW_STREAMS ||
2003 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2004 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2005 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2006 pthread_mutex_unlock(&mMutex);
2007 return -EINVAL;
2008 }
2009 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002010 if (isZsl && m_bIs4KVideo) {
2011 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 pthread_mutex_unlock(&mMutex);
2013 return -EINVAL;
2014 }
2015 /* Check if stream sizes are sane */
2016 if (numStreamsOnEncoder > 2) {
2017 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2018 pthread_mutex_unlock(&mMutex);
2019 return -EINVAL;
2020 } else if (1 < numStreamsOnEncoder){
2021 bUseCommonFeatureMask = true;
2022 LOGH("Multiple streams above max viewfinder size, common mask needed");
2023 }
2024
2025 /* Check if BLOB size is greater than 4k in 4k recording case */
2026 if (m_bIs4KVideo && bJpegExceeds4K) {
2027 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2028 pthread_mutex_unlock(&mMutex);
2029 return -EINVAL;
2030 }
2031
Emilian Peev7650c122017-01-19 08:24:33 -08002032 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2033 depthPresent) {
2034 LOGE("HAL doesn't support depth streams in HFR mode!");
2035 pthread_mutex_unlock(&mMutex);
2036 return -EINVAL;
2037 }
2038
Thierry Strudel3d639192016-09-09 11:52:26 -07002039 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2040 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2041 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2042 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2043 // configurations:
2044 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2045 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2046 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2047 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2048 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2049 __func__);
2050 pthread_mutex_unlock(&mMutex);
2051 return -EINVAL;
2052 }
2053
2054 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2055 // the YUV stream's size is greater or equal to the JPEG size, set common
2056 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2057 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2058 jpegSize.width, jpegSize.height) &&
2059 largeYuv888Size.width > jpegSize.width &&
2060 largeYuv888Size.height > jpegSize.height) {
2061 bYuv888OverrideJpeg = true;
2062 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2063 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2064 }
2065
2066 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2067 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2068 commonFeatureMask);
2069 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2070 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2071
2072 rc = validateStreamDimensions(streamList);
2073 if (rc == NO_ERROR) {
2074 rc = validateStreamRotations(streamList);
2075 }
2076 if (rc != NO_ERROR) {
2077 LOGE("Invalid stream configuration requested!");
2078 pthread_mutex_unlock(&mMutex);
2079 return rc;
2080 }
2081
Emilian Peev0f3c3162017-03-15 12:57:46 +00002082 if (1 < pdStatCount) {
2083 LOGE("HAL doesn't support multiple PD streams");
2084 pthread_mutex_unlock(&mMutex);
2085 return -EINVAL;
2086 }
2087
2088 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2089 (1 == pdStatCount)) {
2090 LOGE("HAL doesn't support PD streams in HFR mode!");
2091 pthread_mutex_unlock(&mMutex);
2092 return -EINVAL;
2093 }
2094
Thierry Strudel3d639192016-09-09 11:52:26 -07002095 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2096 for (size_t i = 0; i < streamList->num_streams; i++) {
2097 camera3_stream_t *newStream = streamList->streams[i];
2098 LOGH("newStream type = %d, stream format = %d "
2099 "stream size : %d x %d, stream rotation = %d",
2100 newStream->stream_type, newStream->format,
2101 newStream->width, newStream->height, newStream->rotation);
2102 // if the stream is already in mStreamInfo, validate it
2103 bool stream_exists = false;
2104 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2105 it != mStreamInfo.end(); it++) {
2106 if ((*it)->stream == newStream) {
2107 QCamera3ProcessingChannel *channel =
2108 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2109 stream_exists = true;
2110 if (channel)
2111 delete channel;
2112 (*it)->status = VALID;
2113 (*it)->stream->priv = NULL;
2114 (*it)->channel = NULL;
2115 }
2116 }
2117 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2118 //new stream
2119 stream_info_t* stream_info;
2120 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2121 if (!stream_info) {
2122 LOGE("Could not allocate stream info");
2123 rc = -ENOMEM;
2124 pthread_mutex_unlock(&mMutex);
2125 return rc;
2126 }
2127 stream_info->stream = newStream;
2128 stream_info->status = VALID;
2129 stream_info->channel = NULL;
2130 mStreamInfo.push_back(stream_info);
2131 }
2132 /* Covers Opaque ZSL and API1 F/W ZSL */
2133 if (IS_USAGE_ZSL(newStream->usage)
2134 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2135 if (zslStream != NULL) {
2136 LOGE("Multiple input/reprocess streams requested!");
2137 pthread_mutex_unlock(&mMutex);
2138 return BAD_VALUE;
2139 }
2140 zslStream = newStream;
2141 }
2142 /* Covers YUV reprocess */
2143 if (inputStream != NULL) {
2144 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2145 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2146 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2147 && inputStream->width == newStream->width
2148 && inputStream->height == newStream->height) {
2149 if (zslStream != NULL) {
2150 /* This scenario indicates that multiple YUV streams with the same size
2151 * as the input stream have been requested. Since the zsl stream handle
2152 * is used solely to override the size of streams that share h/w
2153 * streams, we just make a guess here as to which stream is the ZSL
2154 * stream. This will be refactored once we have generic logic for
2155 * streams sharing encoder output.
2156 */
2157 LOGH("Warning, Multiple ip/reprocess streams requested!");
2158 }
2159 zslStream = newStream;
2160 }
2161 }
2162 }
2163
2164 /* If a zsl stream is set, we know that we have configured at least one input or
2165 bidirectional stream */
2166 if (NULL != zslStream) {
2167 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2168 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2169 mInputStreamInfo.format = zslStream->format;
2170 mInputStreamInfo.usage = zslStream->usage;
2171 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2172 mInputStreamInfo.dim.width,
2173 mInputStreamInfo.dim.height,
2174 mInputStreamInfo.format, mInputStreamInfo.usage);
2175 }
2176
2177 cleanAndSortStreamInfo();
2178 if (mMetadataChannel) {
2179 delete mMetadataChannel;
2180 mMetadataChannel = NULL;
2181 }
2182 if (mSupportChannel) {
2183 delete mSupportChannel;
2184 mSupportChannel = NULL;
2185 }
2186
2187 if (mAnalysisChannel) {
2188 delete mAnalysisChannel;
2189 mAnalysisChannel = NULL;
2190 }
2191
2192 if (mDummyBatchChannel) {
2193 delete mDummyBatchChannel;
2194 mDummyBatchChannel = NULL;
2195 }
2196
Emilian Peev7650c122017-01-19 08:24:33 -08002197 if (mDepthChannel) {
2198 mDepthChannel = NULL;
2199 }
2200
Thierry Strudel2896d122017-02-23 19:18:03 -08002201 char is_type_value[PROPERTY_VALUE_MAX];
2202 property_get("persist.camera.is_type", is_type_value, "4");
2203 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2204
Thierry Strudel3d639192016-09-09 11:52:26 -07002205 //Create metadata channel and initialize it
2206 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2207 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2208 gCamCapability[mCameraId]->color_arrangement);
2209 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2210 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002211 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002212 if (mMetadataChannel == NULL) {
2213 LOGE("failed to allocate metadata channel");
2214 rc = -ENOMEM;
2215 pthread_mutex_unlock(&mMutex);
2216 return rc;
2217 }
2218 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2219 if (rc < 0) {
2220 LOGE("metadata channel initialization failed");
2221 delete mMetadataChannel;
2222 mMetadataChannel = NULL;
2223 pthread_mutex_unlock(&mMutex);
2224 return rc;
2225 }
2226
Thierry Strudel2896d122017-02-23 19:18:03 -08002227 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002228 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002229 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002230 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2231 /* Allocate channel objects for the requested streams */
2232 for (size_t i = 0; i < streamList->num_streams; i++) {
2233 camera3_stream_t *newStream = streamList->streams[i];
2234 uint32_t stream_usage = newStream->usage;
2235 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2236 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2237 struct camera_info *p_info = NULL;
2238 pthread_mutex_lock(&gCamLock);
2239 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2240 pthread_mutex_unlock(&gCamLock);
2241 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2242 || IS_USAGE_ZSL(newStream->usage)) &&
2243 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002244 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002245 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002246 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2247 if (bUseCommonFeatureMask)
2248 zsl_ppmask = commonFeatureMask;
2249 else
2250 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002251 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002252 if (numStreamsOnEncoder > 0)
2253 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2254 else
2255 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002256 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002257 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002259 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002260 LOGH("Input stream configured, reprocess config");
2261 } else {
2262 //for non zsl streams find out the format
2263 switch (newStream->format) {
2264 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2265 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002266 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002267 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2268 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2269 /* add additional features to pp feature mask */
2270 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2271 mStreamConfigInfo.num_streams);
2272
2273 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2274 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2275 CAM_STREAM_TYPE_VIDEO;
2276 if (m_bTnrEnabled && m_bTnrVideo) {
2277 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2278 CAM_QCOM_FEATURE_CPP_TNR;
2279 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2280 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2281 ~CAM_QCOM_FEATURE_CDS;
2282 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2284 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2285 CAM_QTI_FEATURE_PPEISCORE;
2286 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002287 } else {
2288 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2289 CAM_STREAM_TYPE_PREVIEW;
2290 if (m_bTnrEnabled && m_bTnrPreview) {
2291 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2292 CAM_QCOM_FEATURE_CPP_TNR;
2293 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2295 ~CAM_QCOM_FEATURE_CDS;
2296 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002297 if(!m_bSwTnrPreview) {
2298 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2299 ~CAM_QTI_FEATURE_SW_TNR;
2300 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002301 padding_info.width_padding = mSurfaceStridePadding;
2302 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002303 previewSize.width = (int32_t)newStream->width;
2304 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 }
2306 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2307 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2308 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2309 newStream->height;
2310 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2311 newStream->width;
2312 }
2313 }
2314 break;
2315 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002316 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002317 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2318 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2319 if (bUseCommonFeatureMask)
2320 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2321 commonFeatureMask;
2322 else
2323 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2324 CAM_QCOM_FEATURE_NONE;
2325 } else {
2326 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2327 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2328 }
2329 break;
2330 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002331 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2333 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2334 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2336 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002337 /* Remove rotation if it is not supported
2338 for 4K LiveVideo snapshot case (online processing) */
2339 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2340 CAM_QCOM_FEATURE_ROTATION)) {
2341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2342 &= ~CAM_QCOM_FEATURE_ROTATION;
2343 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 } else {
2345 if (bUseCommonFeatureMask &&
2346 isOnEncoder(maxViewfinderSize, newStream->width,
2347 newStream->height)) {
2348 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2349 } else {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2351 }
2352 }
2353 if (isZsl) {
2354 if (zslStream) {
2355 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2356 (int32_t)zslStream->width;
2357 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2358 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2360 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 } else {
2362 LOGE("Error, No ZSL stream identified");
2363 pthread_mutex_unlock(&mMutex);
2364 return -EINVAL;
2365 }
2366 } else if (m_bIs4KVideo) {
2367 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2368 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2369 } else if (bYuv888OverrideJpeg) {
2370 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2371 (int32_t)largeYuv888Size.width;
2372 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2373 (int32_t)largeYuv888Size.height;
2374 }
2375 break;
2376 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2377 case HAL_PIXEL_FORMAT_RAW16:
2378 case HAL_PIXEL_FORMAT_RAW10:
2379 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2380 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2381 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002382 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2383 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2384 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2385 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2386 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2387 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2388 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2389 gCamCapability[mCameraId]->dt[mPDIndex];
2390 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2391 gCamCapability[mCameraId]->vc[mPDIndex];
2392 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002393 break;
2394 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002395 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002396 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2397 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2398 break;
2399 }
2400 }
2401
2402 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2403 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2404 gCamCapability[mCameraId]->color_arrangement);
2405
2406 if (newStream->priv == NULL) {
2407 //New stream, construct channel
2408 switch (newStream->stream_type) {
2409 case CAMERA3_STREAM_INPUT:
2410 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2411 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2412 break;
2413 case CAMERA3_STREAM_BIDIRECTIONAL:
2414 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2415 GRALLOC_USAGE_HW_CAMERA_WRITE;
2416 break;
2417 case CAMERA3_STREAM_OUTPUT:
2418 /* For video encoding streams, set the read/write-rarely
2419 * flags so the buffers may be allocated un-cached */
2420 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2421 newStream->usage |=
2422 (GRALLOC_USAGE_SW_READ_RARELY |
2423 GRALLOC_USAGE_SW_WRITE_RARELY |
2424 GRALLOC_USAGE_HW_CAMERA_WRITE);
2425 else if (IS_USAGE_ZSL(newStream->usage))
2426 {
2427 LOGD("ZSL usage flag skipping");
2428 }
2429 else if (newStream == zslStream
2430 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2431 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2432 } else
2433 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2434 break;
2435 default:
2436 LOGE("Invalid stream_type %d", newStream->stream_type);
2437 break;
2438 }
2439
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002440 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2442 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2443 QCamera3ProcessingChannel *channel = NULL;
2444 switch (newStream->format) {
2445 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2446 if ((newStream->usage &
2447 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2448 (streamList->operation_mode ==
2449 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2450 ) {
2451 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2452 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002453 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002454 this,
2455 newStream,
2456 (cam_stream_type_t)
2457 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2459 mMetadataChannel,
2460 0); //heap buffers are not required for HFR video channel
2461 if (channel == NULL) {
2462 LOGE("allocation of channel failed");
2463 pthread_mutex_unlock(&mMutex);
2464 return -ENOMEM;
2465 }
2466 //channel->getNumBuffers() will return 0 here so use
2467 //MAX_INFLIGHT_HFR_REQUESTS
2468 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2469 newStream->priv = channel;
2470 LOGI("num video buffers in HFR mode: %d",
2471 MAX_INFLIGHT_HFR_REQUESTS);
2472 } else {
2473 /* Copy stream contents in the HFR preview-only case to create a
2474 * dummy batch channel so that sensor streaming is in
2475 * HFR mode */
2476 if (!m_bIsVideo && (streamList->operation_mode ==
2477 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2478 mDummyBatchStream = *newStream;
2479 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002480 int bufferCount = MAX_INFLIGHT_REQUESTS;
2481 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2482 CAM_STREAM_TYPE_VIDEO) {
2483 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2484 bufferCount = MAX_VIDEO_BUFFERS;
2485 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002486 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002488 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 this,
2490 newStream,
2491 (cam_stream_type_t)
2492 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2494 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002495 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002496 if (channel == NULL) {
2497 LOGE("allocation of channel failed");
2498 pthread_mutex_unlock(&mMutex);
2499 return -ENOMEM;
2500 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002501 /* disable UBWC for preview, though supported,
2502 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002503 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002504 (previewSize.width == (int32_t)videoWidth)&&
2505 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002506 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002507 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002508 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002509 newStream->max_buffers = channel->getNumBuffers();
2510 newStream->priv = channel;
2511 }
2512 break;
2513 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2514 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2515 mChannelHandle,
2516 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002517 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 this,
2519 newStream,
2520 (cam_stream_type_t)
2521 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2522 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2523 mMetadataChannel);
2524 if (channel == NULL) {
2525 LOGE("allocation of YUV channel failed");
2526 pthread_mutex_unlock(&mMutex);
2527 return -ENOMEM;
2528 }
2529 newStream->max_buffers = channel->getNumBuffers();
2530 newStream->priv = channel;
2531 break;
2532 }
2533 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2534 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002535 case HAL_PIXEL_FORMAT_RAW10: {
2536 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2537 (HAL_DATASPACE_DEPTH != newStream->data_space))
2538 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 mRawChannel = new QCamera3RawChannel(
2540 mCameraHandle->camera_handle, mChannelHandle,
2541 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002542 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002543 this, newStream,
2544 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002545 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002546 if (mRawChannel == NULL) {
2547 LOGE("allocation of raw channel failed");
2548 pthread_mutex_unlock(&mMutex);
2549 return -ENOMEM;
2550 }
2551 newStream->max_buffers = mRawChannel->getNumBuffers();
2552 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2553 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002555 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002556 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2557 mDepthChannel = new QCamera3DepthChannel(
2558 mCameraHandle->camera_handle, mChannelHandle,
2559 mCameraHandle->ops, NULL, NULL, &padding_info,
2560 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2561 mMetadataChannel);
2562 if (NULL == mDepthChannel) {
2563 LOGE("Allocation of depth channel failed");
2564 pthread_mutex_unlock(&mMutex);
2565 return NO_MEMORY;
2566 }
2567 newStream->priv = mDepthChannel;
2568 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2569 } else {
2570 // Max live snapshot inflight buffer is 1. This is to mitigate
2571 // frame drop issues for video snapshot. The more buffers being
2572 // allocated, the more frame drops there are.
2573 mPictureChannel = new QCamera3PicChannel(
2574 mCameraHandle->camera_handle, mChannelHandle,
2575 mCameraHandle->ops, captureResultCb,
2576 setBufferErrorStatus, &padding_info, this, newStream,
2577 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2578 m_bIs4KVideo, isZsl, mMetadataChannel,
2579 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2580 if (mPictureChannel == NULL) {
2581 LOGE("allocation of channel failed");
2582 pthread_mutex_unlock(&mMutex);
2583 return -ENOMEM;
2584 }
2585 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2586 newStream->max_buffers = mPictureChannel->getNumBuffers();
2587 mPictureChannel->overrideYuvSize(
2588 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2589 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002590 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002591 break;
2592
2593 default:
2594 LOGE("not a supported format 0x%x", newStream->format);
2595 break;
2596 }
2597 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2598 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2599 } else {
2600 LOGE("Error, Unknown stream type");
2601 pthread_mutex_unlock(&mMutex);
2602 return -EINVAL;
2603 }
2604
2605 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002606 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2607 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002608 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002609 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002610 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2611 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2612 }
2613 }
2614
2615 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2616 it != mStreamInfo.end(); it++) {
2617 if ((*it)->stream == newStream) {
2618 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2619 break;
2620 }
2621 }
2622 } else {
2623 // Channel already exists for this stream
2624 // Do nothing for now
2625 }
2626 padding_info = gCamCapability[mCameraId]->padding_info;
2627
Emilian Peev7650c122017-01-19 08:24:33 -08002628 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002629 * since there is no real stream associated with them
2630 */
Emilian Peev7650c122017-01-19 08:24:33 -08002631 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002632 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2633 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002634 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002635 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002636 }
2637
Thierry Strudel2896d122017-02-23 19:18:03 -08002638 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2639 onlyRaw = false;
2640 }
2641
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002642 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002643 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002644 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002645 cam_analysis_info_t analysisInfo;
2646 int32_t ret = NO_ERROR;
2647 ret = mCommon.getAnalysisInfo(
2648 FALSE,
2649 analysisFeatureMask,
2650 &analysisInfo);
2651 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002652 cam_color_filter_arrangement_t analysis_color_arrangement =
2653 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2654 CAM_FILTER_ARRANGEMENT_Y :
2655 gCamCapability[mCameraId]->color_arrangement);
2656 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2657 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002658 cam_dimension_t analysisDim;
2659 analysisDim = mCommon.getMatchingDimension(previewSize,
2660 analysisInfo.analysis_recommended_res);
2661
2662 mAnalysisChannel = new QCamera3SupportChannel(
2663 mCameraHandle->camera_handle,
2664 mChannelHandle,
2665 mCameraHandle->ops,
2666 &analysisInfo.analysis_padding_info,
2667 analysisFeatureMask,
2668 CAM_STREAM_TYPE_ANALYSIS,
2669 &analysisDim,
2670 (analysisInfo.analysis_format
2671 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2672 : CAM_FORMAT_YUV_420_NV21),
2673 analysisInfo.hw_analysis_supported,
2674 gCamCapability[mCameraId]->color_arrangement,
2675 this,
2676 0); // force buffer count to 0
2677 } else {
2678 LOGW("getAnalysisInfo failed, ret = %d", ret);
2679 }
2680 if (!mAnalysisChannel) {
2681 LOGW("Analysis channel cannot be created");
2682 }
2683 }
2684
Thierry Strudel3d639192016-09-09 11:52:26 -07002685 //RAW DUMP channel
2686 if (mEnableRawDump && isRawStreamRequested == false){
2687 cam_dimension_t rawDumpSize;
2688 rawDumpSize = getMaxRawSize(mCameraId);
2689 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2690 setPAAFSupport(rawDumpFeatureMask,
2691 CAM_STREAM_TYPE_RAW,
2692 gCamCapability[mCameraId]->color_arrangement);
2693 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2694 mChannelHandle,
2695 mCameraHandle->ops,
2696 rawDumpSize,
2697 &padding_info,
2698 this, rawDumpFeatureMask);
2699 if (!mRawDumpChannel) {
2700 LOGE("Raw Dump channel cannot be created");
2701 pthread_mutex_unlock(&mMutex);
2702 return -ENOMEM;
2703 }
2704 }
2705
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 if (mAnalysisChannel) {
2707 cam_analysis_info_t analysisInfo;
2708 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2710 CAM_STREAM_TYPE_ANALYSIS;
2711 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2712 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002713 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002714 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2715 &analysisInfo);
2716 if (rc != NO_ERROR) {
2717 LOGE("getAnalysisInfo failed, ret = %d", rc);
2718 pthread_mutex_unlock(&mMutex);
2719 return rc;
2720 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002721 cam_color_filter_arrangement_t analysis_color_arrangement =
2722 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2723 CAM_FILTER_ARRANGEMENT_Y :
2724 gCamCapability[mCameraId]->color_arrangement);
2725 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2726 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2727 analysis_color_arrangement);
2728
Thierry Strudel3d639192016-09-09 11:52:26 -07002729 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002730 mCommon.getMatchingDimension(previewSize,
2731 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002732 mStreamConfigInfo.num_streams++;
2733 }
2734
Thierry Strudel2896d122017-02-23 19:18:03 -08002735 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002736 cam_analysis_info_t supportInfo;
2737 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2738 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2739 setPAAFSupport(callbackFeatureMask,
2740 CAM_STREAM_TYPE_CALLBACK,
2741 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002742 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002743 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002744 if (ret != NO_ERROR) {
2745 /* Ignore the error for Mono camera
2746 * because the PAAF bit mask is only set
2747 * for CAM_STREAM_TYPE_ANALYSIS stream type
2748 */
2749 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2750 LOGW("getAnalysisInfo failed, ret = %d", ret);
2751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002752 }
2753 mSupportChannel = new QCamera3SupportChannel(
2754 mCameraHandle->camera_handle,
2755 mChannelHandle,
2756 mCameraHandle->ops,
2757 &gCamCapability[mCameraId]->padding_info,
2758 callbackFeatureMask,
2759 CAM_STREAM_TYPE_CALLBACK,
2760 &QCamera3SupportChannel::kDim,
2761 CAM_FORMAT_YUV_420_NV21,
2762 supportInfo.hw_analysis_supported,
2763 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002764 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 if (!mSupportChannel) {
2766 LOGE("dummy channel cannot be created");
2767 pthread_mutex_unlock(&mMutex);
2768 return -ENOMEM;
2769 }
2770 }
2771
2772 if (mSupportChannel) {
2773 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2774 QCamera3SupportChannel::kDim;
2775 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2776 CAM_STREAM_TYPE_CALLBACK;
2777 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2778 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2779 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2780 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2781 gCamCapability[mCameraId]->color_arrangement);
2782 mStreamConfigInfo.num_streams++;
2783 }
2784
2785 if (mRawDumpChannel) {
2786 cam_dimension_t rawSize;
2787 rawSize = getMaxRawSize(mCameraId);
2788 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2789 rawSize;
2790 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2791 CAM_STREAM_TYPE_RAW;
2792 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2793 CAM_QCOM_FEATURE_NONE;
2794 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2795 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2796 gCamCapability[mCameraId]->color_arrangement);
2797 mStreamConfigInfo.num_streams++;
2798 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002799
2800 if (mHdrPlusRawSrcChannel) {
2801 cam_dimension_t rawSize;
2802 rawSize = getMaxRawSize(mCameraId);
2803 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2804 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2805 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2806 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2807 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2808 gCamCapability[mCameraId]->color_arrangement);
2809 mStreamConfigInfo.num_streams++;
2810 }
2811
Thierry Strudel3d639192016-09-09 11:52:26 -07002812 /* In HFR mode, if video stream is not added, create a dummy channel so that
2813 * ISP can create a batch mode even for preview only case. This channel is
2814 * never 'start'ed (no stream-on), it is only 'initialized' */
2815 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2816 !m_bIsVideo) {
2817 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2818 setPAAFSupport(dummyFeatureMask,
2819 CAM_STREAM_TYPE_VIDEO,
2820 gCamCapability[mCameraId]->color_arrangement);
2821 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2822 mChannelHandle,
2823 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002824 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002825 this,
2826 &mDummyBatchStream,
2827 CAM_STREAM_TYPE_VIDEO,
2828 dummyFeatureMask,
2829 mMetadataChannel);
2830 if (NULL == mDummyBatchChannel) {
2831 LOGE("creation of mDummyBatchChannel failed."
2832 "Preview will use non-hfr sensor mode ");
2833 }
2834 }
2835 if (mDummyBatchChannel) {
2836 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2837 mDummyBatchStream.width;
2838 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2839 mDummyBatchStream.height;
2840 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2841 CAM_STREAM_TYPE_VIDEO;
2842 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2843 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2844 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2845 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2846 gCamCapability[mCameraId]->color_arrangement);
2847 mStreamConfigInfo.num_streams++;
2848 }
2849
2850 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2851 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002852 m_bIs4KVideo ? 0 :
2853 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002854
2855 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2856 for (pendingRequestIterator i = mPendingRequestsList.begin();
2857 i != mPendingRequestsList.end();) {
2858 i = erasePendingRequest(i);
2859 }
2860 mPendingFrameDropList.clear();
2861 // Initialize/Reset the pending buffers list
2862 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2863 req.mPendingBufferList.clear();
2864 }
2865 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2866
Thierry Strudel3d639192016-09-09 11:52:26 -07002867 mCurJpegMeta.clear();
2868 //Get min frame duration for this streams configuration
2869 deriveMinFrameDuration();
2870
Chien-Yu Chenee335912017-02-09 17:53:20 -08002871 mFirstPreviewIntentSeen = false;
2872
 2873    // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002874 {
2875 Mutex::Autolock l(gHdrPlusClientLock);
2876 disableHdrPlusModeLocked();
2877 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002878
Thierry Strudel3d639192016-09-09 11:52:26 -07002879 // Update state
2880 mState = CONFIGURED;
2881
2882 pthread_mutex_unlock(&mMutex);
2883
2884 return rc;
2885}
2886
2887/*===========================================================================
2888 * FUNCTION : validateCaptureRequest
2889 *
2890 * DESCRIPTION: validate a capture request from camera service
2891 *
2892 * PARAMETERS :
2893 * @request : request from framework to process
2894 *
2895 * RETURN :
2896 *
2897 *==========================================================================*/
2898int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002899 camera3_capture_request_t *request,
2900 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002901{
2902 ssize_t idx = 0;
2903 const camera3_stream_buffer_t *b;
2904 CameraMetadata meta;
2905
2906 /* Sanity check the request */
2907 if (request == NULL) {
2908 LOGE("NULL capture request");
2909 return BAD_VALUE;
2910 }
2911
2912 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2913 /*settings cannot be null for the first request*/
2914 return BAD_VALUE;
2915 }
2916
2917 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002918 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2919 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002920 LOGE("Request %d: No output buffers provided!",
 2921                frameNumber);
2922 return BAD_VALUE;
2923 }
2924 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2925 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2926 request->num_output_buffers, MAX_NUM_STREAMS);
2927 return BAD_VALUE;
2928 }
2929 if (request->input_buffer != NULL) {
2930 b = request->input_buffer;
2931 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2932 LOGE("Request %d: Buffer %ld: Status not OK!",
2933 frameNumber, (long)idx);
2934 return BAD_VALUE;
2935 }
2936 if (b->release_fence != -1) {
2937 LOGE("Request %d: Buffer %ld: Has a release fence!",
2938 frameNumber, (long)idx);
2939 return BAD_VALUE;
2940 }
2941 if (b->buffer == NULL) {
2942 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2943 frameNumber, (long)idx);
2944 return BAD_VALUE;
2945 }
2946 }
2947
2948 // Validate all buffers
2949 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002950 if (b == NULL) {
2951 return BAD_VALUE;
2952 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002953 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002954 QCamera3ProcessingChannel *channel =
2955 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2956 if (channel == NULL) {
2957 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2958 frameNumber, (long)idx);
2959 return BAD_VALUE;
2960 }
2961 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2962 LOGE("Request %d: Buffer %ld: Status not OK!",
2963 frameNumber, (long)idx);
2964 return BAD_VALUE;
2965 }
2966 if (b->release_fence != -1) {
2967 LOGE("Request %d: Buffer %ld: Has a release fence!",
2968 frameNumber, (long)idx);
2969 return BAD_VALUE;
2970 }
2971 if (b->buffer == NULL) {
2972 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2973 frameNumber, (long)idx);
2974 return BAD_VALUE;
2975 }
2976 if (*(b->buffer) == NULL) {
2977 LOGE("Request %d: Buffer %ld: NULL private handle!",
2978 frameNumber, (long)idx);
2979 return BAD_VALUE;
2980 }
2981 idx++;
2982 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002983 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002984 return NO_ERROR;
2985}
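// Summary of the checks above: the request must be non-null, must carry settings on the
// first request after a configure, must have at least one output buffer (or an internal
// stream request), and every output buffer needs an OK status, no release fence, a
// non-null handle and a stream that maps to a configured channel.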
2986
2987/*===========================================================================
2988 * FUNCTION : deriveMinFrameDuration
2989 *
 2990 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2991 * on currently configured streams.
2992 *
2993 * PARAMETERS : NONE
2994 *
2995 * RETURN : NONE
2996 *
2997 *==========================================================================*/
2998void QCamera3HardwareInterface::deriveMinFrameDuration()
2999{
3000 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3001
3002 maxJpegDim = 0;
3003 maxProcessedDim = 0;
3004 maxRawDim = 0;
3005
3006 // Figure out maximum jpeg, processed, and raw dimensions
3007 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3008 it != mStreamInfo.end(); it++) {
3009
3010 // Input stream doesn't have valid stream_type
3011 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3012 continue;
3013
3014 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3015 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3016 if (dimension > maxJpegDim)
3017 maxJpegDim = dimension;
3018 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3019 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3020 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3021 if (dimension > maxRawDim)
3022 maxRawDim = dimension;
3023 } else {
3024 if (dimension > maxProcessedDim)
3025 maxProcessedDim = dimension;
3026 }
3027 }
3028
3029 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3030 MAX_SIZES_CNT);
3031
3032 //Assume all jpeg dimensions are in processed dimensions.
3033 if (maxJpegDim > maxProcessedDim)
3034 maxProcessedDim = maxJpegDim;
3035 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3036 if (maxProcessedDim > maxRawDim) {
3037 maxRawDim = INT32_MAX;
3038
3039 for (size_t i = 0; i < count; i++) {
3040 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3041 gCamCapability[mCameraId]->raw_dim[i].height;
3042 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3043 maxRawDim = dimension;
3044 }
3045 }
3046
3047 //Find minimum durations for processed, jpeg, and raw
3048 for (size_t i = 0; i < count; i++) {
3049 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3050 gCamCapability[mCameraId]->raw_dim[i].height) {
3051 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3052 break;
3053 }
3054 }
3055 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3056 for (size_t i = 0; i < count; i++) {
3057 if (maxProcessedDim ==
3058 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3059 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3060 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3061 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3062 break;
3063 }
3064 }
3065}
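// Illustrative walk-through (sizes are hypothetical): with a 4032x3024 BLOB stream and a
// 1920x1080 preview stream and no RAW stream, maxProcessedDim takes the JPEG area, the
// smallest supported RAW size covering that area becomes maxRawDim, and the matching
// raw_min_duration / picture_min_duration entries populate mMinRawFrameDuration,
// mMinProcessedFrameDuration and mMinJpegFrameDuration.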
3066
3067/*===========================================================================
3068 * FUNCTION : getMinFrameDuration
3069 *
 3070 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 3071 * and current request configuration.
 3072 *
 3073 * PARAMETERS : @request: request sent by the frameworks
 3074 *
 3075 * RETURN : min frame duration for a particular request
3076 *
3077 *==========================================================================*/
3078int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3079{
3080 bool hasJpegStream = false;
3081 bool hasRawStream = false;
3082 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3083 const camera3_stream_t *stream = request->output_buffers[i].stream;
3084 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3085 hasJpegStream = true;
3086 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3087 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3088 stream->format == HAL_PIXEL_FORMAT_RAW16)
3089 hasRawStream = true;
3090 }
3091
3092 if (!hasJpegStream)
3093 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3094 else
3095 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3096}
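// Usage note: a request with only processed (non-BLOB, non-RAW) buffers is bounded by
// MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once the request also carries a
// BLOB (JPEG) buffer, mMinJpegFrameDuration is folded into that maximum as well.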
3097
3098/*===========================================================================
3099 * FUNCTION : handleBuffersDuringFlushLock
3100 *
3101 * DESCRIPTION: Account for buffers returned from back-end during flush
3102 * This function is executed while mMutex is held by the caller.
3103 *
3104 * PARAMETERS :
3105 * @buffer: image buffer for the callback
3106 *
3107 * RETURN :
3108 *==========================================================================*/
3109void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3110{
3111 bool buffer_found = false;
3112 for (List<PendingBuffersInRequest>::iterator req =
3113 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3114 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3115 for (List<PendingBufferInfo>::iterator i =
3116 req->mPendingBufferList.begin();
3117 i != req->mPendingBufferList.end(); i++) {
3118 if (i->buffer == buffer->buffer) {
3119 mPendingBuffersMap.numPendingBufsAtFlush--;
3120 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3121 buffer->buffer, req->frame_number,
3122 mPendingBuffersMap.numPendingBufsAtFlush);
3123 buffer_found = true;
3124 break;
3125 }
3126 }
3127 if (buffer_found) {
3128 break;
3129 }
3130 }
3131 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3132 //signal the flush()
3133 LOGD("All buffers returned to HAL. Continue flush");
3134 pthread_cond_signal(&mBuffersCond);
3135 }
3136}
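// Note: the flush path waits on mBuffersCond until numPendingBufsAtFlush drops to zero,
// so the signal above is what allows a pending flush to proceed once the last
// outstanding buffer has been accounted for.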
3137
Thierry Strudel3d639192016-09-09 11:52:26 -07003138/*===========================================================================
3139 * FUNCTION : handleBatchMetadata
3140 *
3141 * DESCRIPTION: Handles metadata buffer callback in batch mode
3142 *
3143 * PARAMETERS : @metadata_buf: metadata buffer
3144 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3145 * the meta buf in this method
3146 *
3147 * RETURN :
3148 *
3149 *==========================================================================*/
3150void QCamera3HardwareInterface::handleBatchMetadata(
3151 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3152{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003153 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003154
3155 if (NULL == metadata_buf) {
3156 LOGE("metadata_buf is NULL");
3157 return;
3158 }
 3159    /* In batch mode, the metadata will contain the frame number and timestamp of
3160 * the last frame in the batch. Eg: a batch containing buffers from request
3161 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
 3162     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3163 * multiple process_capture_results */
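    // Illustrative example: for a batch covering requests 5..8 this metadata reports frame
    // number 8, so the loop below invokes handleMetadataWithLock() four times, patching in
    // the inferred frame numbers 5, 6, 7 and 8 (and interpolated timestamps) one at a time.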
3164 metadata_buffer_t *metadata =
3165 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3166 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3167 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3168 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3169 uint32_t frame_number = 0, urgent_frame_number = 0;
3170 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3171 bool invalid_metadata = false;
3172 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3173 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003174 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003175
3176 int32_t *p_frame_number_valid =
3177 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3178 uint32_t *p_frame_number =
3179 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3180 int64_t *p_capture_time =
3181 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3182 int32_t *p_urgent_frame_number_valid =
3183 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3184 uint32_t *p_urgent_frame_number =
3185 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3186
3187 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3188 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3189 (NULL == p_urgent_frame_number)) {
3190 LOGE("Invalid metadata");
3191 invalid_metadata = true;
3192 } else {
3193 frame_number_valid = *p_frame_number_valid;
3194 last_frame_number = *p_frame_number;
3195 last_frame_capture_time = *p_capture_time;
3196 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3197 last_urgent_frame_number = *p_urgent_frame_number;
3198 }
3199
 3200    /* In batch mode, when no video buffers are requested, set_parms are sent
3201 * for every capture_request. The difference between consecutive urgent
3202 * frame numbers and frame numbers should be used to interpolate the
3203 * corresponding frame numbers and time stamps */
3204 pthread_mutex_lock(&mMutex);
3205 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003206 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3207 if(idx < 0) {
3208 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3209 last_urgent_frame_number);
3210 mState = ERROR;
3211 pthread_mutex_unlock(&mMutex);
3212 return;
3213 }
3214 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003215 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3216 first_urgent_frame_number;
3217
3218 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3219 urgent_frame_number_valid,
3220 first_urgent_frame_number, last_urgent_frame_number);
3221 }
3222
3223 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003224 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3225 if(idx < 0) {
3226 LOGE("Invalid frame number received: %d. Irrecoverable error",
3227 last_frame_number);
3228 mState = ERROR;
3229 pthread_mutex_unlock(&mMutex);
3230 return;
3231 }
3232 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003233 frameNumDiff = last_frame_number + 1 -
3234 first_frame_number;
3235 mPendingBatchMap.removeItem(last_frame_number);
3236
3237 LOGD("frm: valid: %d frm_num: %d - %d",
3238 frame_number_valid,
3239 first_frame_number, last_frame_number);
3240
3241 }
3242 pthread_mutex_unlock(&mMutex);
3243
3244 if (urgent_frame_number_valid || frame_number_valid) {
3245 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3246 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3247 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3248 urgentFrameNumDiff, last_urgent_frame_number);
3249 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3250 LOGE("frameNumDiff: %d frameNum: %d",
3251 frameNumDiff, last_frame_number);
3252 }
3253
3254 for (size_t i = 0; i < loopCount; i++) {
3255 /* handleMetadataWithLock is called even for invalid_metadata for
3256 * pipeline depth calculation */
3257 if (!invalid_metadata) {
3258 /* Infer frame number. Batch metadata contains frame number of the
3259 * last frame */
3260 if (urgent_frame_number_valid) {
3261 if (i < urgentFrameNumDiff) {
3262 urgent_frame_number =
3263 first_urgent_frame_number + i;
3264 LOGD("inferred urgent frame_number: %d",
3265 urgent_frame_number);
3266 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3267 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3268 } else {
3269 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3270 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3271 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3272 }
3273 }
3274
3275 /* Infer frame number. Batch metadata contains frame number of the
3276 * last frame */
3277 if (frame_number_valid) {
3278 if (i < frameNumDiff) {
3279 frame_number = first_frame_number + i;
3280 LOGD("inferred frame_number: %d", frame_number);
3281 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3282 CAM_INTF_META_FRAME_NUMBER, frame_number);
3283 } else {
3284 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3285 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3286 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3287 }
3288 }
3289
3290 if (last_frame_capture_time) {
3291 //Infer timestamp
3292 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003293 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003294 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003295 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003296 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3297 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3298 LOGD("batch capture_time: %lld, capture_time: %lld",
3299 last_frame_capture_time, capture_time);
3300 }
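            // Illustrative timestamp math (assuming 120 fps HFR and a batch of 4): if the batch
            // metadata carries timestamp T for its last frame, the interpolation above stamps the
            // four frames as T - 3/120 s, T - 2/120 s, T - 1/120 s and T.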
3301 }
3302 pthread_mutex_lock(&mMutex);
3303 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003304 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003305 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3306 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003307 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003308 pthread_mutex_unlock(&mMutex);
3309 }
3310
3311 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003312 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003313 mMetadataChannel->bufDone(metadata_buf);
3314 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003315 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003316 }
3317}
3318
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003319void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3320 camera3_error_msg_code_t errorCode)
3321{
3322 camera3_notify_msg_t notify_msg;
3323 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3324 notify_msg.type = CAMERA3_MSG_ERROR;
3325 notify_msg.message.error.error_code = errorCode;
3326 notify_msg.message.error.error_stream = NULL;
3327 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003328 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003329
3330 return;
3331}
Thierry Strudel3d639192016-09-09 11:52:26 -07003332/*===========================================================================
3333 * FUNCTION : handleMetadataWithLock
3334 *
3335 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3336 *
3337 * PARAMETERS : @metadata_buf: metadata buffer
3338 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3339 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003340 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3341 * last urgent metadata in a batch. Always true for non-batch mode
3342 * @lastMetadataInBatch: Boolean to indicate whether this is the
3343 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003344 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3345 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003346 *
3347 * RETURN :
3348 *
3349 *==========================================================================*/
3350void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003351 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003352 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3353 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003354{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003355 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003356 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3357 //during flush do not send metadata from this thread
3358 LOGD("not sending metadata during flush or when mState is error");
3359 if (free_and_bufdone_meta_buf) {
3360 mMetadataChannel->bufDone(metadata_buf);
3361 free(metadata_buf);
3362 }
3363 return;
3364 }
3365
3366 //not in flush
3367 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3368 int32_t frame_number_valid, urgent_frame_number_valid;
3369 uint32_t frame_number, urgent_frame_number;
3370 int64_t capture_time;
3371 nsecs_t currentSysTime;
3372
3373 int32_t *p_frame_number_valid =
3374 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3375 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3376 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3377 int32_t *p_urgent_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_urgent_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3381 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3382 metadata) {
3383 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3384 *p_frame_number_valid, *p_frame_number);
3385 }
3386
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003387 camera_metadata_t *resultMetadata = nullptr;
3388
Thierry Strudel3d639192016-09-09 11:52:26 -07003389 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3390 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 if (free_and_bufdone_meta_buf) {
3393 mMetadataChannel->bufDone(metadata_buf);
3394 free(metadata_buf);
3395 }
3396 goto done_metadata;
3397 }
3398 frame_number_valid = *p_frame_number_valid;
3399 frame_number = *p_frame_number;
3400 capture_time = *p_capture_time;
3401 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3402 urgent_frame_number = *p_urgent_frame_number;
3403 currentSysTime = systemTime(CLOCK_MONOTONIC);
3404
3405 // Detect if buffers from any requests are overdue
3406 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003407 int64_t timeout;
3408 {
3409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3410 // If there is a pending HDR+ request, the following requests may be blocked until the
3411 // HDR+ request is done. So allow a longer timeout.
3412 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3413 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3414 }
3415
3416 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003417 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003418 assert(missed.stream->priv);
3419 if (missed.stream->priv) {
3420 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3421 assert(ch->mStreams[0]);
3422 if (ch->mStreams[0]) {
3423 LOGE("Cancel missing frame = %d, buffer = %p,"
3424 "stream type = %d, stream format = %d",
3425 req.frame_number, missed.buffer,
3426 ch->mStreams[0]->getMyType(), missed.stream->format);
3427 ch->timeoutFrame(req.frame_number);
3428 }
3429 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003430 }
3431 }
3432 }
3433 //Partial result on process_capture_result for timestamp
3434 if (urgent_frame_number_valid) {
3435 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3436 urgent_frame_number, capture_time);
3437
 3438        //Received an urgent Frame Number, handle it
3439 //using partial results
3440 for (pendingRequestIterator i =
3441 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3442 LOGD("Iterator Frame = %d urgent frame = %d",
3443 i->frame_number, urgent_frame_number);
3444
3445 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3446 (i->partial_result_cnt == 0)) {
3447 LOGE("Error: HAL missed urgent metadata for frame number %d",
3448 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003449 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003450 }
3451
3452 if (i->frame_number == urgent_frame_number &&
3453 i->bUrgentReceived == 0) {
3454
3455 camera3_capture_result_t result;
3456 memset(&result, 0, sizeof(camera3_capture_result_t));
3457
3458 i->partial_result_cnt++;
3459 i->bUrgentReceived = 1;
3460 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003461 result.result = translateCbUrgentMetadataToResultMetadata(
3462 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003463 // Populate metadata result
3464 result.frame_number = urgent_frame_number;
3465 result.num_output_buffers = 0;
3466 result.output_buffers = NULL;
3467 result.partial_result = i->partial_result_cnt;
3468
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003469 {
3470 Mutex::Autolock l(gHdrPlusClientLock);
3471 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3472 // Notify HDR+ client about the partial metadata.
3473 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3474 result.partial_result == PARTIAL_RESULT_COUNT);
3475 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003476 }
3477
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003478 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 LOGD("urgent frame_number = %u, capture_time = %lld",
3480 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003481 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3482 // Instant AEC settled for this frame.
3483 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3484 mInstantAECSettledFrameNumber = urgent_frame_number;
3485 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003486 free_camera_metadata((camera_metadata_t *)result.result);
3487 break;
3488 }
3489 }
3490 }
3491
3492 if (!frame_number_valid) {
3493 LOGD("Not a valid normal frame number, used as SOF only");
3494 if (free_and_bufdone_meta_buf) {
3495 mMetadataChannel->bufDone(metadata_buf);
3496 free(metadata_buf);
3497 }
3498 goto done_metadata;
3499 }
3500 LOGH("valid frame_number = %u, capture_time = %lld",
3501 frame_number, capture_time);
3502
Emilian Peev7650c122017-01-19 08:24:33 -08003503 if (metadata->is_depth_data_valid) {
3504 handleDepthDataLocked(metadata->depth_data, frame_number);
3505 }
3506
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003507 // Check whether any stream buffer corresponding to this is dropped or not
3508 // If dropped, then send the ERROR_BUFFER for the corresponding stream
 3509    // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
3510 for (auto & pendingRequest : mPendingRequestsList) {
3511 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3512 mInstantAECSettledFrameNumber)) {
3513 camera3_notify_msg_t notify_msg = {};
3514 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003515 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003516 QCamera3ProcessingChannel *channel =
3517 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003518 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003519 if (p_cam_frame_drop) {
3520 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003521 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003522 // Got the stream ID for drop frame.
3523 dropFrame = true;
3524 break;
3525 }
3526 }
3527 } else {
3528 // This is instant AEC case.
 3529                // For instant AEC drop the stream until AEC is settled.
3530 dropFrame = true;
3531 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003532
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003533 if (dropFrame) {
3534 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3535 if (p_cam_frame_drop) {
3536 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003537 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003538 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003539 } else {
3540 // For instant AEC, inform frame drop and frame number
3541 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3542 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003543 pendingRequest.frame_number, streamID,
3544 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003545 }
3546 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003547 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003548 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003549 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003550 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003551 if (p_cam_frame_drop) {
3552 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003553 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003554 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003555 } else {
3556 // For instant AEC, inform frame drop and frame number
3557 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3558 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003559 pendingRequest.frame_number, streamID,
3560 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003561 }
3562 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003563 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003564 PendingFrameDrop.stream_ID = streamID;
3565 // Add the Frame drop info to mPendingFrameDropList
3566 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003567 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003568 }
3569 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003571
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003572 for (auto & pendingRequest : mPendingRequestsList) {
3573 // Find the pending request with the frame number.
3574 if (pendingRequest.frame_number == frame_number) {
3575 // Update the sensor timestamp.
3576 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003577
Thierry Strudel3d639192016-09-09 11:52:26 -07003578
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003579 /* Set the timestamp in display metadata so that clients aware of
3580 private_handle such as VT can use this un-modified timestamps.
3581 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003583
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 // Find channel requiring metadata, meaning internal offline postprocess
3585 // is needed.
3586 //TODO: for now, we don't support two streams requiring metadata at the same time.
3587 // (because we are not making copies, and metadata buffer is not reference counted.
3588 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003589 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3590 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003591 if (iter->need_metadata) {
3592 internalPproc = true;
3593 QCamera3ProcessingChannel *channel =
3594 (QCamera3ProcessingChannel *)iter->stream->priv;
3595 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003596 if(p_is_metabuf_queued != NULL) {
3597 *p_is_metabuf_queued = true;
3598 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003599 break;
3600 }
3601 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003602 for (auto itr = pendingRequest.internalRequestList.begin();
3603 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003604 if (itr->need_metadata) {
3605 internalPproc = true;
3606 QCamera3ProcessingChannel *channel =
3607 (QCamera3ProcessingChannel *)itr->stream->priv;
3608 channel->queueReprocMetadata(metadata_buf);
3609 break;
3610 }
3611 }
3612
Thierry Strudel54dc9782017-02-15 12:12:10 -08003613 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003614 resultMetadata = translateFromHalMetadata(metadata,
3615 pendingRequest.timestamp, pendingRequest.request_id,
3616 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3617 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003618 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003619 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003620 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003621 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003622 internalPproc, pendingRequest.fwkCacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003623 lastMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003624
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003625 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003626
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003627 if (pendingRequest.blob_request) {
3628 //Dump tuning metadata if enabled and available
3629 char prop[PROPERTY_VALUE_MAX];
3630 memset(prop, 0, sizeof(prop));
3631 property_get("persist.camera.dumpmetadata", prop, "0");
3632 int32_t enabled = atoi(prop);
3633 if (enabled && metadata->is_tuning_params_valid) {
3634 dumpMetadataToFile(metadata->tuning_params,
3635 mMetaFrameCount,
3636 enabled,
3637 "Snapshot",
3638 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003639 }
3640 }
3641
3642 if (!internalPproc) {
3643 LOGD("couldn't find need_metadata for this metadata");
3644 // Return metadata buffer
3645 if (free_and_bufdone_meta_buf) {
3646 mMetadataChannel->bufDone(metadata_buf);
3647 free(metadata_buf);
3648 }
3649 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003650
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003651 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003652 }
3653 }
3654
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003655 // Try to send out shutter callbacks and capture results.
3656 handlePendingResultsWithLock(frame_number, resultMetadata);
3657 return;
3658
Thierry Strudel3d639192016-09-09 11:52:26 -07003659done_metadata:
3660 for (pendingRequestIterator i = mPendingRequestsList.begin();
3661 i != mPendingRequestsList.end() ;i++) {
3662 i->pipeline_depth++;
3663 }
3664 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3665 unblockRequestIfNecessary();
3666}
3667
3668/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003669 * FUNCTION : handleDepthDataLocked
3670 *
3671 * DESCRIPTION: Handles incoming depth data
3672 *
3673 * PARAMETERS : @depthData : Depth data
3674 * @frameNumber: Frame number of the incoming depth data
3675 *
3676 * RETURN :
3677 *
3678 *==========================================================================*/
3679void QCamera3HardwareInterface::handleDepthDataLocked(
3680 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3681 uint32_t currentFrameNumber;
3682 buffer_handle_t *depthBuffer;
3683
3684 if (nullptr == mDepthChannel) {
3685 LOGE("Depth channel not present!");
3686 return;
3687 }
3688
3689 camera3_stream_buffer_t resultBuffer =
3690 {.acquire_fence = -1,
3691 .release_fence = -1,
3692 .status = CAMERA3_BUFFER_STATUS_OK,
3693 .buffer = nullptr,
3694 .stream = mDepthChannel->getStream()};
3695 camera3_capture_result_t result =
3696 {.result = nullptr,
3697 .num_output_buffers = 1,
3698 .output_buffers = &resultBuffer,
3699 .partial_result = 0,
3700 .frame_number = 0};
3701
3702 do {
3703 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3704 if (nullptr == depthBuffer) {
3705 break;
3706 }
3707
3708 result.frame_number = currentFrameNumber;
3709 resultBuffer.buffer = depthBuffer;
3710 if (currentFrameNumber == frameNumber) {
3711 int32_t rc = mDepthChannel->populateDepthData(depthData,
3712 frameNumber);
3713 if (NO_ERROR != rc) {
3714 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3715 } else {
3716 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3717 }
3718 } else if (currentFrameNumber > frameNumber) {
3719 break;
3720 } else {
3721 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3722 {{currentFrameNumber, mDepthChannel->getStream(),
3723 CAMERA3_MSG_ERROR_BUFFER}}};
3724 orchestrateNotify(&notify_msg);
3725
3726 LOGE("Depth buffer for frame number: %d is missing "
3727 "returning back!", currentFrameNumber);
3728 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3729 }
3730 mDepthChannel->unmapBuffer(currentFrameNumber);
3731
3732 orchestrateResult(&result);
3733 } while (currentFrameNumber < frameNumber);
3734}
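// Note: the loop above drains queued depth buffers in frame order. Buffers older than
// frameNumber that never received data are returned with CAMERA3_MSG_ERROR_BUFFER, the
// matching frame gets populateDepthData(), and anything newer stays queued for a later
// callback.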
3735
3736/*===========================================================================
3737 * FUNCTION : notifyErrorFoPendingDepthData
3738 *
3739 * DESCRIPTION: Returns error for any pending depth buffers
3740 *
3741 * PARAMETERS : depthCh - depth channel that needs to get flushed
3742 *
3743 * RETURN :
3744 *
3745 *==========================================================================*/
3746void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3747 QCamera3DepthChannel *depthCh) {
3748 uint32_t currentFrameNumber;
3749 buffer_handle_t *depthBuffer;
3750
3751 if (nullptr == depthCh) {
3752 return;
3753 }
3754
3755 camera3_notify_msg_t notify_msg =
3756 {.type = CAMERA3_MSG_ERROR,
3757 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3758 camera3_stream_buffer_t resultBuffer =
3759 {.acquire_fence = -1,
3760 .release_fence = -1,
3761 .buffer = nullptr,
3762 .stream = depthCh->getStream(),
3763 .status = CAMERA3_BUFFER_STATUS_ERROR};
3764 camera3_capture_result_t result =
3765 {.result = nullptr,
3766 .frame_number = 0,
3767 .num_output_buffers = 1,
3768 .partial_result = 0,
3769 .output_buffers = &resultBuffer};
3770
3771 while (nullptr !=
3772 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3773 depthCh->unmapBuffer(currentFrameNumber);
3774
3775 notify_msg.message.error.frame_number = currentFrameNumber;
3776 orchestrateNotify(&notify_msg);
3777
3778 resultBuffer.buffer = depthBuffer;
3779 result.frame_number = currentFrameNumber;
3780 orchestrateResult(&result);
3781 };
3782}
3783
3784/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003785 * FUNCTION : hdrPlusPerfLock
3786 *
3787 * DESCRIPTION: perf lock for HDR+ using custom intent
3788 *
3789 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3790 *
3791 * RETURN : None
3792 *
3793 *==========================================================================*/
3794void QCamera3HardwareInterface::hdrPlusPerfLock(
3795 mm_camera_super_buf_t *metadata_buf)
3796{
3797 if (NULL == metadata_buf) {
3798 LOGE("metadata_buf is NULL");
3799 return;
3800 }
3801 metadata_buffer_t *metadata =
3802 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3803 int32_t *p_frame_number_valid =
3804 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3805 uint32_t *p_frame_number =
3806 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3807
3808 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3809 LOGE("%s: Invalid metadata", __func__);
3810 return;
3811 }
3812
3813 //acquire perf lock for 5 sec after the last HDR frame is captured
3814 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3815 if ((p_frame_number != NULL) &&
3816 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003817 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003818 }
3819 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003820}
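// Note: the PERF_LOCK_TAKE_SNAPSHOT lock acquired here is released in
// handleBufferWithLock() once the corresponding BLOB (JPEG) buffer returns;
// HDR_PLUS_PERF_TIME_OUT presumably bounds the hold time otherwise.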
3821
3822/*===========================================================================
3823 * FUNCTION : handleInputBufferWithLock
3824 *
3825 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3826 *
3827 * PARAMETERS : @frame_number: frame number of the input buffer
3828 *
3829 * RETURN :
3830 *
3831 *==========================================================================*/
3832void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3833{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003834 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003835 pendingRequestIterator i = mPendingRequestsList.begin();
3836 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3837 i++;
3838 }
3839 if (i != mPendingRequestsList.end() && i->input_buffer) {
3840 //found the right request
3841 if (!i->shutter_notified) {
3842 CameraMetadata settings;
3843 camera3_notify_msg_t notify_msg;
3844 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3845 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3846 if(i->settings) {
3847 settings = i->settings;
3848 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3849 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3850 } else {
3851 LOGE("No timestamp in input settings! Using current one.");
3852 }
3853 } else {
3854 LOGE("Input settings missing!");
3855 }
3856
3857 notify_msg.type = CAMERA3_MSG_SHUTTER;
3858 notify_msg.message.shutter.frame_number = frame_number;
3859 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003860 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003861 i->shutter_notified = true;
3862 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3863 i->frame_number, notify_msg.message.shutter.timestamp);
3864 }
3865
3866 if (i->input_buffer->release_fence != -1) {
3867 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3868 close(i->input_buffer->release_fence);
3869 if (rc != OK) {
3870 LOGE("input buffer sync wait failed %d", rc);
3871 }
3872 }
3873
3874 camera3_capture_result result;
3875 memset(&result, 0, sizeof(camera3_capture_result));
3876 result.frame_number = frame_number;
3877 result.result = i->settings;
3878 result.input_buffer = i->input_buffer;
3879 result.partial_result = PARTIAL_RESULT_COUNT;
3880
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003881 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003882 LOGD("Input request metadata and input buffer frame_number = %u",
3883 i->frame_number);
3884 i = erasePendingRequest(i);
3885 } else {
3886 LOGE("Could not find input request for frame number %d", frame_number);
3887 }
3888}
3889
3890/*===========================================================================
3891 * FUNCTION : handleBufferWithLock
3892 *
3893 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3894 *
3895 * PARAMETERS : @buffer: image buffer for the callback
3896 * @frame_number: frame number of the image buffer
3897 *
3898 * RETURN :
3899 *
3900 *==========================================================================*/
3901void QCamera3HardwareInterface::handleBufferWithLock(
3902 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3903{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003904 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003905
3906 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3907 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3908 }
3909
Thierry Strudel3d639192016-09-09 11:52:26 -07003910 /* Nothing to be done during error state */
3911 if ((ERROR == mState) || (DEINIT == mState)) {
3912 return;
3913 }
3914 if (mFlushPerf) {
3915 handleBuffersDuringFlushLock(buffer);
3916 return;
3917 }
3918 //not in flush
3919 // If the frame number doesn't exist in the pending request list,
3920 // directly send the buffer to the frameworks, and update pending buffers map
3921 // Otherwise, book-keep the buffer.
3922 pendingRequestIterator i = mPendingRequestsList.begin();
3923 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3924 i++;
3925 }
3926 if (i == mPendingRequestsList.end()) {
3927 // Verify all pending requests frame_numbers are greater
3928 for (pendingRequestIterator j = mPendingRequestsList.begin();
3929 j != mPendingRequestsList.end(); j++) {
3930 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3931 LOGW("Error: pending live frame number %d is smaller than %d",
3932 j->frame_number, frame_number);
3933 }
3934 }
3935 camera3_capture_result_t result;
3936 memset(&result, 0, sizeof(camera3_capture_result_t));
3937 result.result = NULL;
3938 result.frame_number = frame_number;
3939 result.num_output_buffers = 1;
3940 result.partial_result = 0;
3941 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3942 m != mPendingFrameDropList.end(); m++) {
3943 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3944 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3945 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3946 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3947 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3948 frame_number, streamID);
3949 m = mPendingFrameDropList.erase(m);
3950 break;
3951 }
3952 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003953 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003954 result.output_buffers = buffer;
3955 LOGH("result frame_number = %d, buffer = %p",
3956 frame_number, buffer->buffer);
3957
3958 mPendingBuffersMap.removeBuf(buffer->buffer);
3959
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003960 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003961 } else {
3962 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003963 if (i->input_buffer->release_fence != -1) {
3964 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3965 close(i->input_buffer->release_fence);
3966 if (rc != OK) {
3967 LOGE("input buffer sync wait failed %d", rc);
3968 }
3969 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003970 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003971
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003972 // Put buffer into the pending request
3973 for (auto &requestedBuffer : i->buffers) {
3974 if (requestedBuffer.stream == buffer->stream) {
3975 if (requestedBuffer.buffer != nullptr) {
3976 LOGE("Error: buffer is already set");
3977 } else {
3978 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3979 sizeof(camera3_stream_buffer_t));
3980 *(requestedBuffer.buffer) = *buffer;
3981 LOGH("cache buffer %p at result frame_number %u",
3982 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003983 }
3984 }
3985 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003986
3987 if (i->input_buffer) {
3988 // For a reprocessing request, try to send out shutter callback and result metadata.
3989 handlePendingResultsWithLock(frame_number, nullptr);
3990 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003991 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003992
3993 if (mPreviewStarted == false) {
3994 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3995 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07003996 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
3997
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003998 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3999 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4000 mPreviewStarted = true;
4001
4002 // Set power hint for preview
4003 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4004 }
4005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004006}
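// Illustrative sketch (not part of the original HAL flow; the helper name is
// an assumption added for this example): how a buffer-only capture result is
// packaged, as handleBufferWithLock() does above when no pending request
// entry exists for the frame, before it is handed to orchestrateResult().
static inline camera3_capture_result_t makeBufferOnlyResult(uint32_t frameNumber,
        const camera3_stream_buffer_t *buffer)
{
    camera3_capture_result_t result;
    memset(&result, 0, sizeof(camera3_capture_result_t));
    result.frame_number = frameNumber;
    result.result = NULL;              // no metadata in a buffer-only result
    result.num_output_buffers = 1;
    result.output_buffers = buffer;    // caller retains ownership of the buffer
    result.partial_result = 0;
    return result;
}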
4007
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004008void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4009 const camera_metadata_t *resultMetadata)
4010{
4011 // Find the pending request for this result metadata.
4012 auto requestIter = mPendingRequestsList.begin();
4013 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4014 requestIter++;
4015 }
4016
4017 if (requestIter == mPendingRequestsList.end()) {
4018 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4019 return;
4020 }
4021
4022 // Update the result metadata
4023 requestIter->resultMetadata = resultMetadata;
4024
4025 // Check what type of request this is.
4026 bool liveRequest = false;
4027 if (requestIter->hdrplus) {
4028 // HDR+ request doesn't have partial results.
4029 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4030 } else if (requestIter->input_buffer != nullptr) {
4031 // Reprocessing request result is the same as settings.
4032 requestIter->resultMetadata = requestIter->settings;
4033 // Reprocessing request doesn't have partial results.
4034 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4035 } else {
4036 liveRequest = true;
4037 requestIter->partial_result_cnt++;
4038 mPendingLiveRequest--;
4039
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004040 {
4041 Mutex::Autolock l(gHdrPlusClientLock);
4042 // For a live request, send the metadata to HDR+ client.
4043 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4044 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4045 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4046 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004047 }
4048 }
4049
4050 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4051 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4052 bool readyToSend = true;
4053
4054 // Iterate through the pending requests to send out shutter callbacks and results that are
4055 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4056 // live requests that don't have result metadata yet.
4057 auto iter = mPendingRequestsList.begin();
4058 while (iter != mPendingRequestsList.end()) {
4059 // Check if current pending request is ready. If it's not ready, the following pending
4060 // requests are also not ready.
4061 if (readyToSend && iter->resultMetadata == nullptr) {
4062 readyToSend = false;
4063 }
4064
4065 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4066
4067 std::vector<camera3_stream_buffer_t> outputBuffers;
4068
4069 camera3_capture_result_t result = {};
4070 result.frame_number = iter->frame_number;
4071 result.result = iter->resultMetadata;
4072 result.partial_result = iter->partial_result_cnt;
4073
4074 // If this pending buffer has result metadata, we may be able to send out shutter callback
4075 // and result metadata.
4076 if (iter->resultMetadata != nullptr) {
4077 if (!readyToSend) {
4078 // If any of the previous pending request is not ready, this pending request is
4079 // also not ready to send in order to keep shutter callbacks and result metadata
4080 // in order.
4081 iter++;
4082 continue;
4083 }
4084
4085 // Invoke shutter callback if not yet.
4086 if (!iter->shutter_notified) {
4087 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4088
4089 // Find the timestamp in HDR+ result metadata
4090 camera_metadata_ro_entry_t entry;
4091 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4092 ANDROID_SENSOR_TIMESTAMP, &entry);
4093 if (res != OK) {
4094 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4095 __FUNCTION__, iter->frame_number, strerror(-res), res);
4096 } else {
4097 timestamp = entry.data.i64[0];
4098 }
4099
4100 camera3_notify_msg_t notify_msg = {};
4101 notify_msg.type = CAMERA3_MSG_SHUTTER;
4102 notify_msg.message.shutter.frame_number = iter->frame_number;
4103 notify_msg.message.shutter.timestamp = timestamp;
4104 orchestrateNotify(&notify_msg);
4105 iter->shutter_notified = true;
4106 }
4107
4108 result.input_buffer = iter->input_buffer;
4109
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004110 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4111 // If the result metadata belongs to a live request, notify errors for previous pending
4112 // live requests.
4113 mPendingLiveRequest--;
4114
4115 CameraMetadata dummyMetadata;
4116 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4117 result.result = dummyMetadata.release();
4118
4119 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004120
 4121            // partial_result should be PARTIAL_RESULT_COUNT in case of
4122 // ERROR_RESULT.
4123 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4124 result.partial_result = PARTIAL_RESULT_COUNT;
4125
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004126 } else {
4127 iter++;
4128 continue;
4129 }
4130
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004131 // Prepare output buffer array
4132 for (auto bufferInfoIter = iter->buffers.begin();
4133 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4134 if (bufferInfoIter->buffer != nullptr) {
4135
4136 QCamera3Channel *channel =
4137 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4138 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4139
4140 // Check if this buffer is a dropped frame.
4141 auto frameDropIter = mPendingFrameDropList.begin();
4142 while (frameDropIter != mPendingFrameDropList.end()) {
4143 if((frameDropIter->stream_ID == streamID) &&
4144 (frameDropIter->frame_number == frameNumber)) {
4145 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4146 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4147 streamID);
4148 mPendingFrameDropList.erase(frameDropIter);
4149 break;
4150 } else {
4151 frameDropIter++;
4152 }
4153 }
4154
4155 // Check buffer error status
4156 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4157 bufferInfoIter->buffer->buffer);
4158 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4159
4160 outputBuffers.push_back(*(bufferInfoIter->buffer));
4161 free(bufferInfoIter->buffer);
4162 bufferInfoIter->buffer = NULL;
4163 }
4164 }
4165
4166 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4167 result.num_output_buffers = outputBuffers.size();
4168
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004169 orchestrateResult(&result);
4170
4171 // For reprocessing, result metadata is the same as settings so do not free it here to
4172 // avoid double free.
4173 if (result.result != iter->settings) {
4174 free_camera_metadata((camera_metadata_t *)result.result);
4175 }
4176 iter->resultMetadata = nullptr;
4177 iter = erasePendingRequest(iter);
4178 }
4179
4180 if (liveRequest) {
4181 for (auto &iter : mPendingRequestsList) {
4182 // Increment pipeline depth for the following pending requests.
4183 if (iter.frame_number > frameNumber) {
4184 iter.pipeline_depth++;
4185 }
4186 }
4187 }
4188
4189 unblockRequestIfNecessary();
4190}
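// Minimal sketch (illustration only, never called by the HAL; PendingEntryExample
// and canSendInOrder are hypothetical names): the in-order gating rule used by
// handlePendingResultsWithLock() above. An entry may be sent only if it and every
// earlier pending entry already has result metadata, which keeps shutter callbacks
// and capture results in ascending frame-number order.
struct PendingEntryExample {
    uint32_t frameNumber;
    bool hasResultMetadata;
};

static inline bool canSendInOrder(const PendingEntryExample *pending, size_t count,
        size_t index)
{
    if (pending == nullptr || index >= count) {
        return false;
    }
    for (size_t i = 0; i <= index; i++) {
        if (!pending[i].hasResultMetadata) {
            return false;   // an earlier result is still outstanding
        }
    }
    return true;
}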
4191
Thierry Strudel3d639192016-09-09 11:52:26 -07004192/*===========================================================================
4193 * FUNCTION : unblockRequestIfNecessary
4194 *
4195 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4196 * that mMutex is held when this function is called.
4197 *
4198 * PARAMETERS :
4199 *
4200 * RETURN :
4201 *
4202 *==========================================================================*/
4203void QCamera3HardwareInterface::unblockRequestIfNecessary()
4204{
4205 // Unblock process_capture_request
4206 pthread_cond_signal(&mRequestCond);
4207}
4208
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004209/*===========================================================================
4210 * FUNCTION : isHdrSnapshotRequest
4211 *
4212 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4213 *
4214 * PARAMETERS : camera3 request structure
4215 *
4216 * RETURN : boolean decision variable
4217 *
4218 *==========================================================================*/
4219bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4220{
4221 if (request == NULL) {
4222 LOGE("Invalid request handle");
4223 assert(0);
4224 return false;
4225 }
4226
4227 if (!mForceHdrSnapshot) {
4228 CameraMetadata frame_settings;
4229 frame_settings = request->settings;
4230
4231 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4232 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4233 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4234 return false;
4235 }
4236 } else {
4237 return false;
4238 }
4239
4240 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4241 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4242 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4243 return false;
4244 }
4245 } else {
4246 return false;
4247 }
4248 }
4249
4250 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4251 if (request->output_buffers[i].stream->format
4252 == HAL_PIXEL_FORMAT_BLOB) {
4253 return true;
4254 }
4255 }
4256
4257 return false;
4258}
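// Hedged example (not used by the HAL; the helper name is an assumption): settings
// that would satisfy the checks in isHdrSnapshotRequest() above, provided the request
// also carries at least one HAL_PIXEL_FORMAT_BLOB output buffer and no input buffer.
static inline camera_metadata_t *exampleHdrSnapshotSettings()
{
    CameraMetadata settings;
    uint8_t controlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
    uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_HDR;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
    return settings.release();   // caller must free_camera_metadata() the result
}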
4259/*===========================================================================
4260 * FUNCTION : orchestrateRequest
4261 *
4262 * DESCRIPTION: Orchestrates a capture request from camera service
4263 *
4264 * PARAMETERS :
4265 * @request : request from framework to process
4266 *
4267 * RETURN : Error status codes
4268 *
4269 *==========================================================================*/
4270int32_t QCamera3HardwareInterface::orchestrateRequest(
4271 camera3_capture_request_t *request)
4272{
4273
4274 uint32_t originalFrameNumber = request->frame_number;
4275 uint32_t originalOutputCount = request->num_output_buffers;
4276 const camera_metadata_t *original_settings = request->settings;
4277 List<InternalRequest> internallyRequestedStreams;
4278 List<InternalRequest> emptyInternalList;
4279
4280 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4281 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4282 uint32_t internalFrameNumber;
4283 CameraMetadata modified_meta;
4284
4285
4286 /* Add Blob channel to list of internally requested streams */
4287 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4288 if (request->output_buffers[i].stream->format
4289 == HAL_PIXEL_FORMAT_BLOB) {
4290 InternalRequest streamRequested;
4291 streamRequested.meteringOnly = 1;
4292 streamRequested.need_metadata = 0;
4293 streamRequested.stream = request->output_buffers[i].stream;
4294 internallyRequestedStreams.push_back(streamRequested);
4295 }
4296 }
4297 request->num_output_buffers = 0;
4298 auto itr = internallyRequestedStreams.begin();
4299
4300 /* Modify setting to set compensation */
4301 modified_meta = request->settings;
4302 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4303 uint8_t aeLock = 1;
4304 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4305 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4306 camera_metadata_t *modified_settings = modified_meta.release();
4307 request->settings = modified_settings;
4308
4309 /* Capture Settling & -2x frame */
4310 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4311 request->frame_number = internalFrameNumber;
4312 processCaptureRequest(request, internallyRequestedStreams);
4313
4314 request->num_output_buffers = originalOutputCount;
4315 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4316 request->frame_number = internalFrameNumber;
4317 processCaptureRequest(request, emptyInternalList);
4318 request->num_output_buffers = 0;
4319
4320 modified_meta = modified_settings;
4321 expCompensation = 0;
4322 aeLock = 1;
4323 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4324 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4325 modified_settings = modified_meta.release();
4326 request->settings = modified_settings;
4327
4328 /* Capture Settling & 0X frame */
4329
4330 itr = internallyRequestedStreams.begin();
4331 if (itr == internallyRequestedStreams.end()) {
4332 LOGE("Error Internally Requested Stream list is empty");
4333 assert(0);
4334 } else {
4335 itr->need_metadata = 0;
4336 itr->meteringOnly = 1;
4337 }
4338
4339 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4340 request->frame_number = internalFrameNumber;
4341 processCaptureRequest(request, internallyRequestedStreams);
4342
4343 itr = internallyRequestedStreams.begin();
4344 if (itr == internallyRequestedStreams.end()) {
4345 ALOGE("Error Internally Requested Stream list is empty");
4346 assert(0);
4347 } else {
4348 itr->need_metadata = 1;
4349 itr->meteringOnly = 0;
4350 }
4351
4352 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4353 request->frame_number = internalFrameNumber;
4354 processCaptureRequest(request, internallyRequestedStreams);
4355
4356 /* Capture 2X frame*/
4357 modified_meta = modified_settings;
4358 expCompensation = GB_HDR_2X_STEP_EV;
4359 aeLock = 1;
4360 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4361 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4362 modified_settings = modified_meta.release();
4363 request->settings = modified_settings;
4364
4365 itr = internallyRequestedStreams.begin();
4366 if (itr == internallyRequestedStreams.end()) {
4367 ALOGE("Error Internally Requested Stream list is empty");
4368 assert(0);
4369 } else {
4370 itr->need_metadata = 0;
4371 itr->meteringOnly = 1;
4372 }
4373 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4374 request->frame_number = internalFrameNumber;
4375 processCaptureRequest(request, internallyRequestedStreams);
4376
4377 itr = internallyRequestedStreams.begin();
4378 if (itr == internallyRequestedStreams.end()) {
4379 ALOGE("Error Internally Requested Stream list is empty");
4380 assert(0);
4381 } else {
4382 itr->need_metadata = 1;
4383 itr->meteringOnly = 0;
4384 }
4385
4386 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4387 request->frame_number = internalFrameNumber;
4388 processCaptureRequest(request, internallyRequestedStreams);
4389
4390
4391 /* Capture 2X on original streaming config*/
4392 internallyRequestedStreams.clear();
4393
4394 /* Restore original settings pointer */
4395 request->settings = original_settings;
4396 } else {
4397 uint32_t internalFrameNumber;
4398 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4399 request->frame_number = internalFrameNumber;
4400 return processCaptureRequest(request, internallyRequestedStreams);
4401 }
4402
4403 return NO_ERROR;
4404}
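// Sketch only (assumption, not invoked by orchestrateRequest(); the helper name is
// hypothetical): the per-bracket settings tweak applied above. The HDR sequence walks
// the exposure compensation through GB_HDR_HALF_STEP_EV, then 0, then GB_HDR_2X_STEP_EV,
// keeping AE locked for the whole bracket so only the compensation changes.
static inline camera_metadata_t *exampleBracketSettings(
        const camera_metadata_t *baseSettings, int32_t expCompensation)
{
    CameraMetadata meta;
    meta = baseSettings;   // deep-copies the framework-provided settings
    uint8_t aeLock = 1;
    meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
    meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
    return meta.release();   // caller must free_camera_metadata() the result
}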
4405
4406/*===========================================================================
4407 * FUNCTION : orchestrateResult
4408 *
4409 * DESCRIPTION: Orchestrates a capture result to camera service
4410 *
4411 * PARAMETERS :
 4412 *   @result : capture result to send to the framework
4413 *
4414 * RETURN :
4415 *
4416 *==========================================================================*/
4417void QCamera3HardwareInterface::orchestrateResult(
4418 camera3_capture_result_t *result)
4419{
4420 uint32_t frameworkFrameNumber;
4421 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4422 frameworkFrameNumber);
4423 if (rc != NO_ERROR) {
4424 LOGE("Cannot find translated frameworkFrameNumber");
4425 assert(0);
4426 } else {
4427 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004428 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004429 } else {
4430 result->frame_number = frameworkFrameNumber;
4431 mCallbackOps->process_capture_result(mCallbackOps, result);
4432 }
4433 }
4434}
4435
4436/*===========================================================================
4437 * FUNCTION : orchestrateNotify
4438 *
4439 * DESCRIPTION: Orchestrates a notify to camera service
4440 *
4441 * PARAMETERS :
 4442 *   @notify_msg : notify message to send to the framework
4443 *
4444 * RETURN :
4445 *
4446 *==========================================================================*/
4447void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4448{
4449 uint32_t frameworkFrameNumber;
4450 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004451 int32_t rc = NO_ERROR;
4452
4453 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004454 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004455
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004456 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004457 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4458 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4459 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004460 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004461 LOGE("Cannot find translated frameworkFrameNumber");
4462 assert(0);
4463 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004464 }
4465 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004466
4467 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4468 LOGD("Internal Request drop the notifyCb");
4469 } else {
4470 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4471 mCallbackOps->notify(mCallbackOps, notify_msg);
4472 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004473}
4474
4475/*===========================================================================
4476 * FUNCTION : FrameNumberRegistry
4477 *
4478 * DESCRIPTION: Constructor
4479 *
4480 * PARAMETERS :
4481 *
4482 * RETURN :
4483 *
4484 *==========================================================================*/
4485FrameNumberRegistry::FrameNumberRegistry()
4486{
4487 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4488}
4489
4490/*===========================================================================
4491 * FUNCTION : ~FrameNumberRegistry
4492 *
4493 * DESCRIPTION: Destructor
4494 *
4495 * PARAMETERS :
4496 *
4497 * RETURN :
4498 *
4499 *==========================================================================*/
4500FrameNumberRegistry::~FrameNumberRegistry()
4501{
4502}
4503
4504/*===========================================================================
 4505 * FUNCTION   : purgeOldEntriesLocked
4506 *
 4507 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4508 *
4509 * PARAMETERS :
4510 *
4511 * RETURN : NONE
4512 *
4513 *==========================================================================*/
4514void FrameNumberRegistry::purgeOldEntriesLocked()
4515{
4516 while (_register.begin() != _register.end()) {
4517 auto itr = _register.begin();
4518 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4519 _register.erase(itr);
4520 } else {
4521 return;
4522 }
4523 }
4524}
4525
4526/*===========================================================================
4527 * FUNCTION : allocStoreInternalFrameNumber
4528 *
4529 * DESCRIPTION: Method to note down a framework request and associate a new
4530 * internal request number against it
4531 *
4532 * PARAMETERS :
4533 * @fFrameNumber: Identifier given by framework
4534 * @internalFN : Output parameter which will have the newly generated internal
4535 * entry
4536 *
4537 * RETURN : Error code
4538 *
4539 *==========================================================================*/
4540int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4541 uint32_t &internalFrameNumber)
4542{
4543 Mutex::Autolock lock(mRegistryLock);
4544 internalFrameNumber = _nextFreeInternalNumber++;
4545 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4546 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4547 purgeOldEntriesLocked();
4548 return NO_ERROR;
4549}
4550
4551/*===========================================================================
4552 * FUNCTION : generateStoreInternalFrameNumber
4553 *
4554 * DESCRIPTION: Method to associate a new internal request number independent
 4555 *              of any association with framework requests
4556 *
4557 * PARAMETERS :
 4558 *   @internalFrameNumber: Output parameter which will have the newly generated internal frame number
4559 *
4560 *
4561 * RETURN : Error code
4562 *
4563 *==========================================================================*/
4564int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4565{
4566 Mutex::Autolock lock(mRegistryLock);
4567 internalFrameNumber = _nextFreeInternalNumber++;
4568 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4569 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4570 purgeOldEntriesLocked();
4571 return NO_ERROR;
4572}
4573
4574/*===========================================================================
4575 * FUNCTION : getFrameworkFrameNumber
4576 *
 4577 * DESCRIPTION: Method to query the framework frame number given an internal one
 4578 *
 4579 * PARAMETERS :
 4580 *   @internalFrameNumber: Internal reference
 4581 *   @frameworkFrameNumber: Output parameter holding the framework frame number
4582 *
4583 * RETURN : Error code
4584 *
4585 *==========================================================================*/
4586int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4587 uint32_t &frameworkFrameNumber)
4588{
4589 Mutex::Autolock lock(mRegistryLock);
4590 auto itr = _register.find(internalFrameNumber);
4591 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004592 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004593 return -ENOENT;
4594 }
4595
4596 frameworkFrameNumber = itr->second;
4597 purgeOldEntriesLocked();
4598 return NO_ERROR;
4599}
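// Usage sketch (illustration only; the helper name and local variables are assumptions):
// how the registry above maps an internal frame number back to its framework counterpart.
// Internal-only requests map to EMPTY_FRAMEWORK_FRAME_NUMBER, which is why
// orchestrateResult()/orchestrateNotify() drop them instead of forwarding them.
static inline bool exampleIsInternalOnlyFrame(FrameNumberRegistry &registry,
        uint32_t internalFrameNumber)
{
    uint32_t frameworkFrameNumber = 0;
    if (registry.getFrameworkFrameNumber(internalFrameNumber,
            frameworkFrameNumber) != NO_ERROR) {
        return false;   // entry already purged from the LRU register
    }
    return (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER);
}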
Thierry Strudel3d639192016-09-09 11:52:26 -07004600
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004601status_t QCamera3HardwareInterface::fillPbStreamConfig(
4602 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4603 QCamera3Channel *channel, uint32_t streamIndex) {
4604 if (config == nullptr) {
4605 LOGE("%s: config is null", __FUNCTION__);
4606 return BAD_VALUE;
4607 }
4608
4609 if (channel == nullptr) {
4610 LOGE("%s: channel is null", __FUNCTION__);
4611 return BAD_VALUE;
4612 }
4613
4614 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4615 if (stream == nullptr) {
4616 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4617 return NAME_NOT_FOUND;
4618 }
4619
4620 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4621 if (streamInfo == nullptr) {
4622 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4623 return NAME_NOT_FOUND;
4624 }
4625
4626 config->id = pbStreamId;
4627 config->image.width = streamInfo->dim.width;
4628 config->image.height = streamInfo->dim.height;
4629 config->image.padding = 0;
4630 config->image.format = pbStreamFormat;
4631
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004632 uint32_t totalPlaneSize = 0;
4633
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004634 // Fill plane information.
4635 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4636 pbcamera::PlaneConfiguration plane;
4637 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4638 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4639 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004640
4641 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004642 }
4643
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004644 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004645 return OK;
4646}
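// Worked example with made-up numbers (not taken from any real sensor mode): for a
// hypothetical 1920x1080 stream whose planes report stride/scanline of 1920x1088 (Y)
// and 1920x544 (CbCr), the plane data totals 1920*1088 + 1920*544 = 2088960 + 1044480
// = 3133440 bytes. If the backend reports frame_len of 3137536, fillPbStreamConfig()
// above records the remaining 3137536 - 3133440 = 4096 bytes as config->image.padding.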
4647
Thierry Strudel3d639192016-09-09 11:52:26 -07004648/*===========================================================================
4649 * FUNCTION : processCaptureRequest
4650 *
4651 * DESCRIPTION: process a capture request from camera service
4652 *
4653 * PARAMETERS :
4654 * @request : request from framework to process
4655 *
4656 * RETURN :
4657 *
4658 *==========================================================================*/
4659int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004660 camera3_capture_request_t *request,
4661 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004662{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004663 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004664 int rc = NO_ERROR;
4665 int32_t request_id;
4666 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004667 bool isVidBufRequested = false;
4668 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004669 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004670
4671 pthread_mutex_lock(&mMutex);
4672
4673 // Validate current state
4674 switch (mState) {
4675 case CONFIGURED:
4676 case STARTED:
4677 /* valid state */
4678 break;
4679
4680 case ERROR:
4681 pthread_mutex_unlock(&mMutex);
4682 handleCameraDeviceError();
4683 return -ENODEV;
4684
4685 default:
4686 LOGE("Invalid state %d", mState);
4687 pthread_mutex_unlock(&mMutex);
4688 return -ENODEV;
4689 }
4690
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004691 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004692 if (rc != NO_ERROR) {
4693 LOGE("incoming request is not valid");
4694 pthread_mutex_unlock(&mMutex);
4695 return rc;
4696 }
4697
4698 meta = request->settings;
4699
4700 // For first capture request, send capture intent, and
4701 // stream on all streams
4702 if (mState == CONFIGURED) {
4703 // send an unconfigure to the backend so that the isp
4704 // resources are deallocated
4705 if (!mFirstConfiguration) {
4706 cam_stream_size_info_t stream_config_info;
4707 int32_t hal_version = CAM_HAL_V3;
4708 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4709 stream_config_info.buffer_info.min_buffers =
4710 MIN_INFLIGHT_REQUESTS;
4711 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004712 m_bIs4KVideo ? 0 :
4713 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004714 clear_metadata_buffer(mParameters);
4715 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4716 CAM_INTF_PARM_HAL_VERSION, hal_version);
4717 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4718 CAM_INTF_META_STREAM_INFO, stream_config_info);
4719 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4720 mParameters);
4721 if (rc < 0) {
4722 LOGE("set_parms for unconfigure failed");
4723 pthread_mutex_unlock(&mMutex);
4724 return rc;
4725 }
4726 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004727 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004728 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004729 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004730 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004731 property_get("persist.camera.is_type", is_type_value, "4");
4732 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4733 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4734 property_get("persist.camera.is_type_preview", is_type_value, "4");
4735 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4736 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004737
4738 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4739 int32_t hal_version = CAM_HAL_V3;
4740 uint8_t captureIntent =
4741 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4742 mCaptureIntent = captureIntent;
4743 clear_metadata_buffer(mParameters);
4744 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4745 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4746 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004747 if (mFirstConfiguration) {
4748 // configure instant AEC
4749 // Instant AEC is a session based parameter and it is needed only
4750 // once per complete session after open camera.
4751 // i.e. This is set only once for the first capture request, after open camera.
4752 setInstantAEC(meta);
4753 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004754 uint8_t fwkVideoStabMode=0;
4755 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4756 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4757 }
4758
4759 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4760 // turn it on for video/preview
4761 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4762 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 int32_t vsMode;
4764 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4766 rc = BAD_VALUE;
4767 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004768 LOGD("setEis %d", setEis);
4769 bool eis3Supported = false;
4770 size_t count = IS_TYPE_MAX;
4771 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4772 for (size_t i = 0; i < count; i++) {
4773 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4774 eis3Supported = true;
4775 break;
4776 }
4777 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004778
4779 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004780 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4782 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004783 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4784 is_type = isTypePreview;
4785 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4786 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4787 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004788 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004789 } else {
4790 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004791 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004792 } else {
4793 is_type = IS_TYPE_NONE;
4794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004796 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004797 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4798 }
4799 }
4800
4801 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4802 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4803
Thierry Strudel54dc9782017-02-15 12:12:10 -08004804 //Disable tintless only if the property is set to 0
4805 memset(prop, 0, sizeof(prop));
4806 property_get("persist.camera.tintless.enable", prop, "1");
4807 int32_t tintless_value = atoi(prop);
4808
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4810 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004811
Thierry Strudel3d639192016-09-09 11:52:26 -07004812 //Disable CDS for HFR mode or if DIS/EIS is on.
4813 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4814 //after every configure_stream
4815 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4816 (m_bIsVideo)) {
4817 int32_t cds = CAM_CDS_MODE_OFF;
4818 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4819 CAM_INTF_PARM_CDS_MODE, cds))
4820 LOGE("Failed to disable CDS for HFR mode");
4821
4822 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004823
4824 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4825 uint8_t* use_av_timer = NULL;
4826
4827 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004828 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004829 use_av_timer = &m_debug_avtimer;
4830 }
4831 else{
4832 use_av_timer =
4833 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004834 if (use_av_timer) {
4835 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4836 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004837 }
4838
4839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4840 rc = BAD_VALUE;
4841 }
4842 }
4843
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 setMobicat();
4845
4846 /* Set fps and hfr mode while sending meta stream info so that sensor
4847 * can configure appropriate streaming mode */
4848 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004849 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4850 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004851 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4852 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004853 if (rc == NO_ERROR) {
4854 int32_t max_fps =
4855 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004856 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004857 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4858 }
4859 /* For HFR, more buffers are dequeued upfront to improve the performance */
4860 if (mBatchSize) {
4861 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4862 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4863 }
4864 }
4865 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 LOGE("setHalFpsRange failed");
4867 }
4868 }
4869 if (meta.exists(ANDROID_CONTROL_MODE)) {
4870 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4871 rc = extractSceneMode(meta, metaMode, mParameters);
4872 if (rc != NO_ERROR) {
4873 LOGE("extractSceneMode failed");
4874 }
4875 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004876 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004877
Thierry Strudel04e026f2016-10-10 11:27:36 -07004878 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4879 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4880 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4881 rc = setVideoHdrMode(mParameters, vhdr);
4882 if (rc != NO_ERROR) {
4883 LOGE("setVideoHDR is failed");
4884 }
4885 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886
Thierry Strudel3d639192016-09-09 11:52:26 -07004887 //TODO: validate the arguments, HSV scenemode should have only the
4888 //advertised fps ranges
4889
4890 /*set the capture intent, hal version, tintless, stream info,
 4891         * and DIS enable parameters to the backend */
4892 LOGD("set_parms META_STREAM_INFO " );
4893 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004894 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4895 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 mStreamConfigInfo.type[i],
4897 mStreamConfigInfo.stream_sizes[i].width,
4898 mStreamConfigInfo.stream_sizes[i].height,
4899 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 mStreamConfigInfo.format[i],
4901 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004902 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004903
Thierry Strudel3d639192016-09-09 11:52:26 -07004904 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4905 mParameters);
4906 if (rc < 0) {
4907 LOGE("set_parms failed for hal version, stream info");
4908 }
4909
Chien-Yu Chenee335912017-02-09 17:53:20 -08004910 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4911 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 if (rc != NO_ERROR) {
4913 LOGE("Failed to get sensor output size");
4914 pthread_mutex_unlock(&mMutex);
4915 goto error_exit;
4916 }
4917
4918 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4919 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004920 mSensorModeInfo.active_array_size.width,
4921 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004922
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004923 {
4924 Mutex::Autolock l(gHdrPlusClientLock);
4925 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004926 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004927 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4928 if (rc != OK) {
4929 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4930 mCameraId, mSensorModeInfo.op_pixel_clk);
4931 pthread_mutex_unlock(&mMutex);
4932 goto error_exit;
4933 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004934 }
4935 }
4936
Thierry Strudel3d639192016-09-09 11:52:26 -07004937 /* Set batchmode before initializing channel. Since registerBuffer
4938 * internally initializes some of the channels, better set batchmode
4939 * even before first register buffer */
4940 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4941 it != mStreamInfo.end(); it++) {
4942 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4943 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4944 && mBatchSize) {
4945 rc = channel->setBatchSize(mBatchSize);
4946 //Disable per frame map unmap for HFR/batchmode case
4947 rc |= channel->setPerFrameMapUnmap(false);
4948 if (NO_ERROR != rc) {
4949 LOGE("Channel init failed %d", rc);
4950 pthread_mutex_unlock(&mMutex);
4951 goto error_exit;
4952 }
4953 }
4954 }
4955
4956 //First initialize all streams
4957 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4958 it != mStreamInfo.end(); it++) {
4959 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4960 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4961 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004962 setEis) {
4963 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4964 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4965 is_type = mStreamConfigInfo.is_type[i];
4966 break;
4967 }
4968 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004969 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004970 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 rc = channel->initialize(IS_TYPE_NONE);
4972 }
4973 if (NO_ERROR != rc) {
4974 LOGE("Channel initialization failed %d", rc);
4975 pthread_mutex_unlock(&mMutex);
4976 goto error_exit;
4977 }
4978 }
4979
4980 if (mRawDumpChannel) {
4981 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4982 if (rc != NO_ERROR) {
4983 LOGE("Error: Raw Dump Channel init failed");
4984 pthread_mutex_unlock(&mMutex);
4985 goto error_exit;
4986 }
4987 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004988 if (mHdrPlusRawSrcChannel) {
4989 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4990 if (rc != NO_ERROR) {
4991 LOGE("Error: HDR+ RAW Source Channel init failed");
4992 pthread_mutex_unlock(&mMutex);
4993 goto error_exit;
4994 }
4995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004996 if (mSupportChannel) {
4997 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4998 if (rc < 0) {
4999 LOGE("Support channel initialization failed");
5000 pthread_mutex_unlock(&mMutex);
5001 goto error_exit;
5002 }
5003 }
5004 if (mAnalysisChannel) {
5005 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5006 if (rc < 0) {
5007 LOGE("Analysis channel initialization failed");
5008 pthread_mutex_unlock(&mMutex);
5009 goto error_exit;
5010 }
5011 }
5012 if (mDummyBatchChannel) {
5013 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5014 if (rc < 0) {
5015 LOGE("mDummyBatchChannel setBatchSize failed");
5016 pthread_mutex_unlock(&mMutex);
5017 goto error_exit;
5018 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005019 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005020 if (rc < 0) {
5021 LOGE("mDummyBatchChannel initialization failed");
5022 pthread_mutex_unlock(&mMutex);
5023 goto error_exit;
5024 }
5025 }
5026
5027 // Set bundle info
5028 rc = setBundleInfo();
5029 if (rc < 0) {
5030 LOGE("setBundleInfo failed %d", rc);
5031 pthread_mutex_unlock(&mMutex);
5032 goto error_exit;
5033 }
5034
5035 //update settings from app here
5036 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5037 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5038 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5039 }
5040 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5041 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5042 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5043 }
5044 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5045 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5046 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5047
5048 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5049 (mLinkedCameraId != mCameraId) ) {
5050 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5051 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005052 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005053 goto error_exit;
5054 }
5055 }
5056
5057 // add bundle related cameras
5058 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5059 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005060 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5061 &m_pDualCamCmdPtr->bundle_info;
5062 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005063 if (mIsDeviceLinked)
5064 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5065 else
5066 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5067
5068 pthread_mutex_lock(&gCamLock);
5069
5070 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5071 LOGE("Dualcam: Invalid Session Id ");
5072 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005073 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005074 goto error_exit;
5075 }
5076
5077 if (mIsMainCamera == 1) {
5078 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5079 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005080 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005081 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 // related session id should be session id of linked session
5083 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5084 } else {
5085 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5086 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005087 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005088 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005089 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5090 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005091 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005092 pthread_mutex_unlock(&gCamLock);
5093
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005094 rc = mCameraHandle->ops->set_dual_cam_cmd(
5095 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005096 if (rc < 0) {
5097 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005098 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005099 goto error_exit;
5100 }
5101 }
5102
5103 //Then start them.
5104 LOGH("Start META Channel");
5105 rc = mMetadataChannel->start();
5106 if (rc < 0) {
5107 LOGE("META channel start failed");
5108 pthread_mutex_unlock(&mMutex);
5109 goto error_exit;
5110 }
5111
5112 if (mAnalysisChannel) {
5113 rc = mAnalysisChannel->start();
5114 if (rc < 0) {
5115 LOGE("Analysis channel start failed");
5116 mMetadataChannel->stop();
5117 pthread_mutex_unlock(&mMutex);
5118 goto error_exit;
5119 }
5120 }
5121
5122 if (mSupportChannel) {
5123 rc = mSupportChannel->start();
5124 if (rc < 0) {
5125 LOGE("Support channel start failed");
5126 mMetadataChannel->stop();
5127 /* Although support and analysis are mutually exclusive today
 5128                adding it in any case for future-proofing */
5129 if (mAnalysisChannel) {
5130 mAnalysisChannel->stop();
5131 }
5132 pthread_mutex_unlock(&mMutex);
5133 goto error_exit;
5134 }
5135 }
5136 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5137 it != mStreamInfo.end(); it++) {
5138 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5139 LOGH("Start Processing Channel mask=%d",
5140 channel->getStreamTypeMask());
5141 rc = channel->start();
5142 if (rc < 0) {
5143 LOGE("channel start failed");
5144 pthread_mutex_unlock(&mMutex);
5145 goto error_exit;
5146 }
5147 }
5148
5149 if (mRawDumpChannel) {
5150 LOGD("Starting raw dump stream");
5151 rc = mRawDumpChannel->start();
5152 if (rc != NO_ERROR) {
5153 LOGE("Error Starting Raw Dump Channel");
5154 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5155 it != mStreamInfo.end(); it++) {
5156 QCamera3Channel *channel =
5157 (QCamera3Channel *)(*it)->stream->priv;
5158 LOGH("Stopping Processing Channel mask=%d",
5159 channel->getStreamTypeMask());
5160 channel->stop();
5161 }
5162 if (mSupportChannel)
5163 mSupportChannel->stop();
5164 if (mAnalysisChannel) {
5165 mAnalysisChannel->stop();
5166 }
5167 mMetadataChannel->stop();
5168 pthread_mutex_unlock(&mMutex);
5169 goto error_exit;
5170 }
5171 }
5172
5173 if (mChannelHandle) {
5174
5175 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5176 mChannelHandle);
5177 if (rc != NO_ERROR) {
5178 LOGE("start_channel failed %d", rc);
5179 pthread_mutex_unlock(&mMutex);
5180 goto error_exit;
5181 }
5182 }
5183
5184 goto no_error;
5185error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005186 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005187 return rc;
5188no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005189 mWokenUpByDaemon = false;
5190 mPendingLiveRequest = 0;
5191 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005192 }
5193
Chien-Yu Chenee335912017-02-09 17:53:20 -08005194 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005195 {
5196 Mutex::Autolock l(gHdrPlusClientLock);
5197 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5198 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5199 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5200 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5201 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5202 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005203 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005204 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005205 pthread_mutex_unlock(&mMutex);
5206 return rc;
5207 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005208
5209 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005210 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005211 }
5212
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005214 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005215
5216 if (mFlushPerf) {
5217 //we cannot accept any requests during flush
5218 LOGE("process_capture_request cannot proceed during flush");
5219 pthread_mutex_unlock(&mMutex);
5220 return NO_ERROR; //should return an error
5221 }
5222
5223 if (meta.exists(ANDROID_REQUEST_ID)) {
5224 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5225 mCurrentRequestId = request_id;
5226 LOGD("Received request with id: %d", request_id);
5227 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5228 LOGE("Unable to find request id field, \
5229 & no previous id available");
5230 pthread_mutex_unlock(&mMutex);
5231 return NAME_NOT_FOUND;
5232 } else {
5233 LOGD("Re-using old request id");
5234 request_id = mCurrentRequestId;
5235 }
5236
5237 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5238 request->num_output_buffers,
5239 request->input_buffer,
5240 frameNumber);
5241 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005242 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005243 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005244 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 uint32_t snapshotStreamId = 0;
5246 for (size_t i = 0; i < request->num_output_buffers; i++) {
5247 const camera3_stream_buffer_t& output = request->output_buffers[i];
5248 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5249
Emilian Peev7650c122017-01-19 08:24:33 -08005250 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5251 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005252 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 blob_request = 1;
5254 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5255 }
5256
5257 if (output.acquire_fence != -1) {
5258 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5259 close(output.acquire_fence);
5260 if (rc != OK) {
5261 LOGE("sync wait failed %d", rc);
5262 pthread_mutex_unlock(&mMutex);
5263 return rc;
5264 }
5265 }
5266
Emilian Peev0f3c3162017-03-15 12:57:46 +00005267 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5268 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005269 depthRequestPresent = true;
5270 continue;
5271 }
5272
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005273 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005274 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005275
5276 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5277 isVidBufRequested = true;
5278 }
5279 }
5280
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005281    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5282 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5283 itr++) {
5284 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5285 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5286 channel->getStreamID(channel->getStreamTypeMask());
5287
5288 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5289 isVidBufRequested = true;
5290 }
5291 }
5292
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005294 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005295 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005296 }
5297 if (blob_request && mRawDumpChannel) {
5298 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005299 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005300 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005301 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005302 }
5303
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005304 {
5305 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5306 // Request a RAW buffer if
5307 // 1. mHdrPlusRawSrcChannel is valid.
5308 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5309 // 3. There is no pending HDR+ request.
5310 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5311 mHdrPlusPendingRequests.size() == 0) {
5312 streamsArray.stream_request[streamsArray.num_streams].streamID =
5313 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5314 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5315 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005316 }
5317
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005318 //extract capture intent
5319 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5320 mCaptureIntent =
5321 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5322 }
5323
5324 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5325 mCacMode =
5326 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5327 }
5328
5329 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005330 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005331
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005332 {
5333 Mutex::Autolock l(gHdrPlusClientLock);
5334 // If this request has a still capture intent, try to submit an HDR+ request.
5335 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5336 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5337 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5338 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005339 }
5340
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005341 if (hdrPlusRequest) {
5342 // For a HDR+ request, just set the frame parameters.
5343 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5344 if (rc < 0) {
5345 LOGE("fail to set frame parameters");
5346 pthread_mutex_unlock(&mMutex);
5347 return rc;
5348 }
5349 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005350 /* Parse the settings:
5351 * - For every request in NORMAL MODE
5352 * - For every request in HFR mode during preview only case
5353 * - For first request of every batch in HFR mode during video
5354 * recording. In batchmode the same settings except frame number is
5355 * repeated in each request of the batch.
5356 */
5357 if (!mBatchSize ||
5358 (mBatchSize && !isVidBufRequested) ||
5359 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005360 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005361 if (rc < 0) {
5362 LOGE("fail to set frame parameters");
5363 pthread_mutex_unlock(&mMutex);
5364 return rc;
5365 }
5366 }
5367 /* For batchMode HFR, setFrameParameters is not called for every
5368 * request. But only frame number of the latest request is parsed.
5369 * Keep track of first and last frame numbers in a batch so that
 5370          * metadata for the frame numbers of the batch can be duplicated in
 5371          * handleBatchMetadata */
5372 if (mBatchSize) {
5373 if (!mToBeQueuedVidBufs) {
5374 //start of the batch
5375 mFirstFrameNumberInBatch = request->frame_number;
5376 }
5377 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5378 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5379 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005380 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005381 return BAD_VALUE;
5382 }
5383 }
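        // Example (batch size assumed): with mBatchSize == 4 and the batch starting at
        // frame 100, the settings are parsed only once for the whole batch;
        // mFirstFrameNumberInBatch records 100 so handleBatchMetadata() can replicate
        // the resulting metadata for frames 100-103.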
5384 if (mNeedSensorRestart) {
5385 /* Unlock the mutex as restartSensor waits on the channels to be
5386 * stopped, which in turn calls stream callback functions -
5387 * handleBufferWithLock and handleMetadataWithLock */
5388 pthread_mutex_unlock(&mMutex);
5389 rc = dynamicUpdateMetaStreamInfo();
5390 if (rc != NO_ERROR) {
5391 LOGE("Restarting the sensor failed");
5392 return BAD_VALUE;
5393 }
5394 mNeedSensorRestart = false;
5395 pthread_mutex_lock(&mMutex);
5396 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005397 if(mResetInstantAEC) {
5398 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5399 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5400 mResetInstantAEC = false;
5401 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005402 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005403 if (request->input_buffer->acquire_fence != -1) {
5404 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5405 close(request->input_buffer->acquire_fence);
5406 if (rc != OK) {
5407 LOGE("input buffer sync wait failed %d", rc);
5408 pthread_mutex_unlock(&mMutex);
5409 return rc;
5410 }
5411 }
5412 }
5413
5414 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5415 mLastCustIntentFrmNum = frameNumber;
5416 }
5417 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005418 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005419 pendingRequestIterator latestRequest;
5420 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005421 pendingRequest.num_buffers = depthRequestPresent ?
5422 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 pendingRequest.request_id = request_id;
5424 pendingRequest.blob_request = blob_request;
5425 pendingRequest.timestamp = 0;
5426 pendingRequest.bUrgentReceived = 0;
5427 if (request->input_buffer) {
5428 pendingRequest.input_buffer =
5429 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5430 *(pendingRequest.input_buffer) = *(request->input_buffer);
5431 pInputBuffer = pendingRequest.input_buffer;
5432 } else {
5433 pendingRequest.input_buffer = NULL;
5434 pInputBuffer = NULL;
5435 }
5436
5437 pendingRequest.pipeline_depth = 0;
5438 pendingRequest.partial_result_cnt = 0;
5439 extractJpegMetadata(mCurJpegMeta, request);
5440 pendingRequest.jpegMetadata = mCurJpegMeta;
5441 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5442 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005443 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005444 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5445 mHybridAeEnable =
5446 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5447 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005448
5449 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5450 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005451 /* DevCamDebug metadata processCaptureRequest */
5452 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5453 mDevCamDebugMetaEnable =
5454 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5455 }
5456 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5457 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005458
5459 //extract CAC info
5460 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5461 mCacMode =
5462 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5463 }
5464 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005465 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005466
5467 PendingBuffersInRequest bufsForCurRequest;
5468 bufsForCurRequest.frame_number = frameNumber;
5469 // Mark current timestamp for the new request
5470 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005471 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005472
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005473 if (hdrPlusRequest) {
5474 // Save settings for this request.
5475 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5476 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5477
5478 // Add to pending HDR+ request queue.
5479 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5480 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5481
5482 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5483 }
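    // The memcpy above keeps a private snapshot of mParameters with the pending HDR+
    // request, so subsequent requests can modify mParameters without disturbing the
    // settings associated with this frame's eventual result.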
5484
Thierry Strudel3d639192016-09-09 11:52:26 -07005485 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005486 if ((request->output_buffers[i].stream->data_space ==
5487 HAL_DATASPACE_DEPTH) &&
5488 (HAL_PIXEL_FORMAT_BLOB ==
5489 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005490 continue;
5491 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005492 RequestedBufferInfo requestedBuf;
5493 memset(&requestedBuf, 0, sizeof(requestedBuf));
5494 requestedBuf.stream = request->output_buffers[i].stream;
5495 requestedBuf.buffer = NULL;
5496 pendingRequest.buffers.push_back(requestedBuf);
5497
 5498        // Add the buffer handle to the pending buffers list
5499 PendingBufferInfo bufferInfo;
5500 bufferInfo.buffer = request->output_buffers[i].buffer;
5501 bufferInfo.stream = request->output_buffers[i].stream;
5502 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5503 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5504 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5505 frameNumber, bufferInfo.buffer,
5506 channel->getStreamTypeMask(), bufferInfo.stream->format);
5507 }
5508 // Add this request packet into mPendingBuffersMap
5509 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5510 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5511 mPendingBuffersMap.get_num_overall_buffers());
5512
5513 latestRequest = mPendingRequestsList.insert(
5514 mPendingRequestsList.end(), pendingRequest);
5515 if(mFlush) {
5516 LOGI("mFlush is true");
5517 pthread_mutex_unlock(&mMutex);
5518 return NO_ERROR;
5519 }
5520
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005521 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5522 // channel.
5523 if (!hdrPlusRequest) {
5524 int indexUsed;
5525 // Notify metadata channel we receive a request
5526 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005527
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005528 if(request->input_buffer != NULL){
5529 LOGD("Input request, frame_number %d", frameNumber);
5530 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5531 if (NO_ERROR != rc) {
5532 LOGE("fail to set reproc parameters");
5533 pthread_mutex_unlock(&mMutex);
5534 return rc;
5535 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005536 }
5537
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005538 // Call request on other streams
5539 uint32_t streams_need_metadata = 0;
5540 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5541 for (size_t i = 0; i < request->num_output_buffers; i++) {
5542 const camera3_stream_buffer_t& output = request->output_buffers[i];
5543 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5544
5545 if (channel == NULL) {
5546 LOGW("invalid channel pointer for stream");
5547 continue;
5548 }
5549
5550 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5551 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5552 output.buffer, request->input_buffer, frameNumber);
5553 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005554 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5556 if (rc < 0) {
5557 LOGE("Fail to request on picture channel");
5558 pthread_mutex_unlock(&mMutex);
5559 return rc;
5560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005561 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005562 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5563 assert(NULL != mDepthChannel);
5564 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565
Emilian Peev7650c122017-01-19 08:24:33 -08005566 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5567 if (rc < 0) {
5568 LOGE("Fail to map on depth buffer");
5569 pthread_mutex_unlock(&mMutex);
5570 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005571 }
Emilian Peev7650c122017-01-19 08:24:33 -08005572 } else {
5573 LOGD("snapshot request with buffer %p, frame_number %d",
5574 output.buffer, frameNumber);
5575 if (!request->settings) {
5576 rc = channel->request(output.buffer, frameNumber,
5577 NULL, mPrevParameters, indexUsed);
5578 } else {
5579 rc = channel->request(output.buffer, frameNumber,
5580 NULL, mParameters, indexUsed);
5581 }
5582 if (rc < 0) {
5583 LOGE("Fail to request on picture channel");
5584 pthread_mutex_unlock(&mMutex);
5585 return rc;
5586 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005587
Emilian Peev7650c122017-01-19 08:24:33 -08005588 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5589 uint32_t j = 0;
5590 for (j = 0; j < streamsArray.num_streams; j++) {
5591 if (streamsArray.stream_request[j].streamID == streamId) {
5592 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5593 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5594 else
5595 streamsArray.stream_request[j].buf_index = indexUsed;
5596 break;
5597 }
5598 }
5599 if (j == streamsArray.num_streams) {
5600 LOGE("Did not find matching stream to update index");
5601 assert(0);
5602 }
5603
5604 pendingBufferIter->need_metadata = true;
5605 streams_need_metadata++;
5606 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005607 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005608 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5609 bool needMetadata = false;
5610 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5611 rc = yuvChannel->request(output.buffer, frameNumber,
5612 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5613 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005614 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005616 pthread_mutex_unlock(&mMutex);
5617 return rc;
5618 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005619
5620 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5621 uint32_t j = 0;
5622 for (j = 0; j < streamsArray.num_streams; j++) {
5623 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005624 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5625 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5626 else
5627 streamsArray.stream_request[j].buf_index = indexUsed;
5628 break;
5629 }
5630 }
5631 if (j == streamsArray.num_streams) {
5632 LOGE("Did not find matching stream to update index");
5633 assert(0);
5634 }
5635
5636 pendingBufferIter->need_metadata = needMetadata;
5637 if (needMetadata)
5638 streams_need_metadata += 1;
5639 LOGD("calling YUV channel request, need_metadata is %d",
5640 needMetadata);
5641 } else {
5642 LOGD("request with buffer %p, frame_number %d",
5643 output.buffer, frameNumber);
5644
5645 rc = channel->request(output.buffer, frameNumber, indexUsed);
5646
5647 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5648 uint32_t j = 0;
5649 for (j = 0; j < streamsArray.num_streams; j++) {
5650 if (streamsArray.stream_request[j].streamID == streamId) {
5651 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5652 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5653 else
5654 streamsArray.stream_request[j].buf_index = indexUsed;
5655 break;
5656 }
5657 }
5658 if (j == streamsArray.num_streams) {
5659 LOGE("Did not find matching stream to update index");
5660 assert(0);
5661 }
5662
5663 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5664 && mBatchSize) {
5665 mToBeQueuedVidBufs++;
5666 if (mToBeQueuedVidBufs == mBatchSize) {
5667 channel->queueBatchBuf();
5668 }
5669 }
5670 if (rc < 0) {
5671 LOGE("request failed");
5672 pthread_mutex_unlock(&mMutex);
5673 return rc;
5674 }
5675 }
5676 pendingBufferIter++;
5677 }
5678
5679 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5680 itr++) {
5681 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5682
5683 if (channel == NULL) {
5684 LOGE("invalid channel pointer for stream");
5685 assert(0);
5686 return BAD_VALUE;
5687 }
5688
5689 InternalRequest requestedStream;
5690 requestedStream = (*itr);
5691
5692
5693 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5694 LOGD("snapshot request internally input buffer %p, frame_number %d",
5695 request->input_buffer, frameNumber);
5696 if(request->input_buffer != NULL){
5697 rc = channel->request(NULL, frameNumber,
5698 pInputBuffer, &mReprocMeta, indexUsed, true,
5699 requestedStream.meteringOnly);
5700 if (rc < 0) {
5701 LOGE("Fail to request on picture channel");
5702 pthread_mutex_unlock(&mMutex);
5703 return rc;
5704 }
5705 } else {
5706 LOGD("snapshot request with frame_number %d", frameNumber);
5707 if (!request->settings) {
5708 rc = channel->request(NULL, frameNumber,
5709 NULL, mPrevParameters, indexUsed, true,
5710 requestedStream.meteringOnly);
5711 } else {
5712 rc = channel->request(NULL, frameNumber,
5713 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5714 }
5715 if (rc < 0) {
5716 LOGE("Fail to request on picture channel");
5717 pthread_mutex_unlock(&mMutex);
5718 return rc;
5719 }
5720
5721 if ((*itr).meteringOnly != 1) {
5722 requestedStream.need_metadata = 1;
5723 streams_need_metadata++;
5724 }
5725 }
5726
5727 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5728 uint32_t j = 0;
5729 for (j = 0; j < streamsArray.num_streams; j++) {
5730 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005731 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5732 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5733 else
5734 streamsArray.stream_request[j].buf_index = indexUsed;
5735 break;
5736 }
5737 }
5738 if (j == streamsArray.num_streams) {
5739 LOGE("Did not find matching stream to update index");
5740 assert(0);
5741 }
5742
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005743 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005744 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005745 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005746 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005747 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005748 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005749 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005750
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 //If 2 streams have need_metadata set to true, fail the request, unless
5752 //we copy/reference count the metadata buffer
5753 if (streams_need_metadata > 1) {
 5754            LOGE("not supporting request in which two streams require"
 5755                    " HAL metadata for reprocessing");
5756 pthread_mutex_unlock(&mMutex);
5757 return -EINVAL;
5758 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005759
Emilian Peev7650c122017-01-19 08:24:33 -08005760 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5762 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5763 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5764 pthread_mutex_unlock(&mMutex);
5765 return BAD_VALUE;
5766 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 if (request->input_buffer == NULL) {
5768 /* Set the parameters to backend:
5769 * - For every request in NORMAL MODE
5770 * - For every request in HFR mode during preview only case
5771 * - Once every batch in HFR mode during video recording
5772 */
5773 if (!mBatchSize ||
5774 (mBatchSize && !isVidBufRequested) ||
5775 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5776 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5777 mBatchSize, isVidBufRequested,
5778 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005779
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005780 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
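                // Merge this request's streams into mBatchedStreamsArray, de-duplicating
                // by streamID, so the single set_parms call issued for the whole batch
                // covers every stream requested anywhere in the batch.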
5781 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5782 uint32_t m = 0;
5783 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5784 if (streamsArray.stream_request[k].streamID ==
5785 mBatchedStreamsArray.stream_request[m].streamID)
5786 break;
5787 }
5788 if (m == mBatchedStreamsArray.num_streams) {
5789 mBatchedStreamsArray.stream_request\
5790 [mBatchedStreamsArray.num_streams].streamID =
5791 streamsArray.stream_request[k].streamID;
5792 mBatchedStreamsArray.stream_request\
5793 [mBatchedStreamsArray.num_streams].buf_index =
5794 streamsArray.stream_request[k].buf_index;
5795 mBatchedStreamsArray.num_streams =
5796 mBatchedStreamsArray.num_streams + 1;
5797 }
5798 }
5799 streamsArray = mBatchedStreamsArray;
5800 }
5801 /* Update stream id of all the requested buffers */
5802 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5803 streamsArray)) {
5804 LOGE("Failed to set stream type mask in the parameters");
5805 return BAD_VALUE;
5806 }
5807
5808 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5809 mParameters);
5810 if (rc < 0) {
5811 LOGE("set_parms failed");
5812 }
 5813            /* reset to zero because the batch is queued */
5814 mToBeQueuedVidBufs = 0;
5815 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5816 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5817 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005818 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5819 uint32_t m = 0;
5820 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5821 if (streamsArray.stream_request[k].streamID ==
5822 mBatchedStreamsArray.stream_request[m].streamID)
5823 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005824 }
5825 if (m == mBatchedStreamsArray.num_streams) {
5826 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5827 streamID = streamsArray.stream_request[k].streamID;
5828 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5829 buf_index = streamsArray.stream_request[k].buf_index;
5830 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5831 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005832 }
5833 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005834 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005835 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005836 }
5837
5838 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5839
5840 mState = STARTED;
 5841    // Use a timed condition wait
5842 struct timespec ts;
5843 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005844 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005845 if (rc < 0) {
5846 isValidTimeout = 0;
 5847        LOGE("Error reading the monotonic clock!!");
5848 }
5849 else {
 5850        // Make the timeout 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005851 int64_t timeout = 5;
5852 {
5853 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5854 // If there is a pending HDR+ request, the following requests may be blocked until the
5855 // HDR+ request is done. So allow a longer timeout.
5856 if (mHdrPlusPendingRequests.size() > 0) {
5857 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5858 }
5859 }
5860 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005861 }
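    // Summary of the wait below: the request thread blocks while at least
    // mMinInFlightRequests results are still outstanding (never for reprocess requests,
    // i.e. when pInputBuffer is set); when woken by the daemon it proceeds once the
    // count drops below mMaxInFlightRequests, and the timed wait bounds the block by
    // ~5s, or by MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT while an HDR+ request is pending.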
5862 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005863 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005864 (mState != ERROR) && (mState != DEINIT)) {
5865 if (!isValidTimeout) {
5866 LOGD("Blocking on conditional wait");
5867 pthread_cond_wait(&mRequestCond, &mMutex);
5868 }
5869 else {
5870 LOGD("Blocking on timed conditional wait");
5871 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5872 if (rc == ETIMEDOUT) {
5873 rc = -ENODEV;
5874 LOGE("Unblocked on timeout!!!!");
5875 break;
5876 }
5877 }
5878 LOGD("Unblocked");
5879 if (mWokenUpByDaemon) {
5880 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005881 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005882 break;
5883 }
5884 }
5885 pthread_mutex_unlock(&mMutex);
5886
5887 return rc;
5888}
5889
5890/*===========================================================================
5891 * FUNCTION : dump
5892 *
 5893 * DESCRIPTION: Dump pending requests, pending buffers, and the pending frame
 5894 *              drop list to the supplied file descriptor
 5895 * PARAMETERS :
 5896 *   @fd : file descriptor to write the dump output to
 5897 *
 5898 * RETURN     : None
5899 *==========================================================================*/
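// Typically reached through the camera service dump path, for example via
// "adb shell dumpsys media.camera" (see the mUpdateDebugLevel note at the end of this
// function); shown here only as an illustrative trigger, not the only one.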
5900void QCamera3HardwareInterface::dump(int fd)
5901{
5902 pthread_mutex_lock(&mMutex);
5903 dprintf(fd, "\n Camera HAL3 information Begin \n");
5904
5905 dprintf(fd, "\nNumber of pending requests: %zu \n",
5906 mPendingRequestsList.size());
5907 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5908 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5909 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5910 for(pendingRequestIterator i = mPendingRequestsList.begin();
5911 i != mPendingRequestsList.end(); i++) {
5912 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5913 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5914 i->input_buffer);
5915 }
5916 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5917 mPendingBuffersMap.get_num_overall_buffers());
5918 dprintf(fd, "-------+------------------\n");
5919 dprintf(fd, " Frame | Stream type mask \n");
5920 dprintf(fd, "-------+------------------\n");
5921 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5922 for(auto &j : req.mPendingBufferList) {
5923 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5924 dprintf(fd, " %5d | %11d \n",
5925 req.frame_number, channel->getStreamTypeMask());
5926 }
5927 }
5928 dprintf(fd, "-------+------------------\n");
5929
5930 dprintf(fd, "\nPending frame drop list: %zu\n",
5931 mPendingFrameDropList.size());
5932 dprintf(fd, "-------+-----------\n");
5933 dprintf(fd, " Frame | Stream ID \n");
5934 dprintf(fd, "-------+-----------\n");
5935 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5936 i != mPendingFrameDropList.end(); i++) {
5937 dprintf(fd, " %5d | %9d \n",
5938 i->frame_number, i->stream_ID);
5939 }
5940 dprintf(fd, "-------+-----------\n");
5941
5942 dprintf(fd, "\n Camera HAL3 information End \n");
5943
5944 /* use dumpsys media.camera as trigger to send update debug level event */
5945 mUpdateDebugLevel = true;
5946 pthread_mutex_unlock(&mMutex);
5947 return;
5948}
5949
5950/*===========================================================================
5951 * FUNCTION : flush
5952 *
5953 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5954 * conditionally restarts channels
5955 *
5956 * PARAMETERS :
 5957 *   @restartChannels : re-start all channels
5958 *
5959 *
5960 * RETURN :
5961 * 0 on success
5962 * Error code on failure
5963 *==========================================================================*/
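// Note: besides framework-initiated flushes, this is also used internally with
// restartChannels == false by handleCameraDeviceError() further below.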
5964int QCamera3HardwareInterface::flush(bool restartChannels)
5965{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005966 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005967 int32_t rc = NO_ERROR;
5968
5969 LOGD("Unblocking Process Capture Request");
5970 pthread_mutex_lock(&mMutex);
5971 mFlush = true;
5972 pthread_mutex_unlock(&mMutex);
5973
5974 rc = stopAllChannels();
5975 // unlink of dualcam
5976 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005977 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5978 &m_pDualCamCmdPtr->bundle_info;
5979 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005980 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5981 pthread_mutex_lock(&gCamLock);
5982
5983 if (mIsMainCamera == 1) {
5984 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5985 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005986 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005987 // related session id should be session id of linked session
5988 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5989 } else {
5990 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5991 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005992 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005993 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5994 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005995 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005996 pthread_mutex_unlock(&gCamLock);
5997
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005998 rc = mCameraHandle->ops->set_dual_cam_cmd(
5999 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006000 if (rc < 0) {
6001 LOGE("Dualcam: Unlink failed, but still proceed to close");
6002 }
6003 }
6004
6005 if (rc < 0) {
6006 LOGE("stopAllChannels failed");
6007 return rc;
6008 }
6009 if (mChannelHandle) {
6010 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6011 mChannelHandle);
6012 }
6013
6014 // Reset bundle info
6015 rc = setBundleInfo();
6016 if (rc < 0) {
6017 LOGE("setBundleInfo failed %d", rc);
6018 return rc;
6019 }
6020
6021 // Mutex Lock
6022 pthread_mutex_lock(&mMutex);
6023
6024 // Unblock process_capture_request
6025 mPendingLiveRequest = 0;
6026 pthread_cond_signal(&mRequestCond);
6027
6028 rc = notifyErrorForPendingRequests();
6029 if (rc < 0) {
6030 LOGE("notifyErrorForPendingRequests failed");
6031 pthread_mutex_unlock(&mMutex);
6032 return rc;
6033 }
6034
6035 mFlush = false;
6036
6037 // Start the Streams/Channels
6038 if (restartChannels) {
6039 rc = startAllChannels();
6040 if (rc < 0) {
6041 LOGE("startAllChannels failed");
6042 pthread_mutex_unlock(&mMutex);
6043 return rc;
6044 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006045 if (mChannelHandle) {
6046 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6047 mChannelHandle);
6048 if (rc < 0) {
6049 LOGE("start_channel failed");
6050 pthread_mutex_unlock(&mMutex);
6051 return rc;
6052 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006053 }
6054 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006055 pthread_mutex_unlock(&mMutex);
6056
6057 return 0;
6058}
6059
6060/*===========================================================================
6061 * FUNCTION : flushPerf
6062 *
6063 * DESCRIPTION: This is the performance optimization version of flush that does
6064 * not use stream off, rather flushes the system
6065 *
6066 * PARAMETERS :
6067 *
6068 *
6069 * RETURN : 0 : success
6070 * -EINVAL: input is malformed (device is not valid)
6071 * -ENODEV: if the device has encountered a serious error
6072 *==========================================================================*/
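// Unlike flush(), this path leaves the streams running: it issues a backend flush and
// then waits (up to FLUSH_TIMEOUT seconds) for the buffers that were pending at the
// time of the call, before erroring out the remaining requests.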
6073int QCamera3HardwareInterface::flushPerf()
6074{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006075 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006076 int32_t rc = 0;
6077 struct timespec timeout;
6078 bool timed_wait = false;
6079
6080 pthread_mutex_lock(&mMutex);
6081 mFlushPerf = true;
6082 mPendingBuffersMap.numPendingBufsAtFlush =
6083 mPendingBuffersMap.get_num_overall_buffers();
6084 LOGD("Calling flush. Wait for %d buffers to return",
6085 mPendingBuffersMap.numPendingBufsAtFlush);
6086
6087 /* send the flush event to the backend */
6088 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6089 if (rc < 0) {
6090 LOGE("Error in flush: IOCTL failure");
6091 mFlushPerf = false;
6092 pthread_mutex_unlock(&mMutex);
6093 return -ENODEV;
6094 }
6095
6096 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6097 LOGD("No pending buffers in HAL, return flush");
6098 mFlushPerf = false;
6099 pthread_mutex_unlock(&mMutex);
6100 return rc;
6101 }
6102
6103 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006104 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006105 if (rc < 0) {
 6106        LOGE("Error reading the monotonic clock, cannot use timed wait");
6107 } else {
6108 timeout.tv_sec += FLUSH_TIMEOUT;
6109 timed_wait = true;
6110 }
6111
6112 //Block on conditional variable
6113 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6114 LOGD("Waiting on mBuffersCond");
6115 if (!timed_wait) {
6116 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6117 if (rc != 0) {
6118 LOGE("pthread_cond_wait failed due to rc = %s",
6119 strerror(rc));
6120 break;
6121 }
6122 } else {
6123 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6124 if (rc != 0) {
6125 LOGE("pthread_cond_timedwait failed due to rc = %s",
6126 strerror(rc));
6127 break;
6128 }
6129 }
6130 }
6131 if (rc != 0) {
6132 mFlushPerf = false;
6133 pthread_mutex_unlock(&mMutex);
6134 return -ENODEV;
6135 }
6136
6137 LOGD("Received buffers, now safe to return them");
6138
6139 //make sure the channels handle flush
6140 //currently only required for the picture channel to release snapshot resources
6141 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6142 it != mStreamInfo.end(); it++) {
6143 QCamera3Channel *channel = (*it)->channel;
6144 if (channel) {
6145 rc = channel->flush();
6146 if (rc) {
6147 LOGE("Flushing the channels failed with error %d", rc);
6148 // even though the channel flush failed we need to continue and
6149 // return the buffers we have to the framework, however the return
6150 // value will be an error
6151 rc = -ENODEV;
6152 }
6153 }
6154 }
6155
6156 /* notify the frameworks and send errored results */
6157 rc = notifyErrorForPendingRequests();
6158 if (rc < 0) {
6159 LOGE("notifyErrorForPendingRequests failed");
6160 pthread_mutex_unlock(&mMutex);
6161 return rc;
6162 }
6163
6164 //unblock process_capture_request
6165 mPendingLiveRequest = 0;
6166 unblockRequestIfNecessary();
6167
6168 mFlushPerf = false;
6169 pthread_mutex_unlock(&mMutex);
6170 LOGD ("Flush Operation complete. rc = %d", rc);
6171 return rc;
6172}
6173
6174/*===========================================================================
6175 * FUNCTION : handleCameraDeviceError
6176 *
6177 * DESCRIPTION: This function calls internal flush and notifies the error to
6178 * framework and updates the state variable.
6179 *
6180 * PARAMETERS : None
6181 *
6182 * RETURN : NO_ERROR on Success
6183 * Error code on failure
6184 *==========================================================================*/
6185int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6186{
6187 int32_t rc = NO_ERROR;
6188
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006189 {
6190 Mutex::Autolock lock(mFlushLock);
6191 pthread_mutex_lock(&mMutex);
6192 if (mState != ERROR) {
6193 //if mState != ERROR, nothing to be done
6194 pthread_mutex_unlock(&mMutex);
6195 return NO_ERROR;
6196 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006197 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006198
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006199 rc = flush(false /* restart channels */);
6200 if (NO_ERROR != rc) {
6201 LOGE("internal flush to handle mState = ERROR failed");
6202 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006203
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006204 pthread_mutex_lock(&mMutex);
6205 mState = DEINIT;
6206 pthread_mutex_unlock(&mMutex);
6207 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006208
6209 camera3_notify_msg_t notify_msg;
6210 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6211 notify_msg.type = CAMERA3_MSG_ERROR;
6212 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6213 notify_msg.message.error.error_stream = NULL;
6214 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006215 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006216
6217 return rc;
6218}
6219
6220/*===========================================================================
6221 * FUNCTION : captureResultCb
6222 *
6223 * DESCRIPTION: Callback handler for all capture result
6224 * (streams, as well as metadata)
6225 *
6226 * PARAMETERS :
 6227 *   @metadata_buf : metadata information
 6228 *   @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
 6229 *   @frame_number : frame number of the request
 6230 *   @isInputBuffer : true when the callback signals input (reprocess) buffer completion
6231 * RETURN : NONE
6232 *==========================================================================*/
6233void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6234 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6235{
6236 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006237 pthread_mutex_lock(&mMutex);
6238 uint8_t batchSize = mBatchSize;
6239 pthread_mutex_unlock(&mMutex);
6240 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006241 handleBatchMetadata(metadata_buf,
6242 true /* free_and_bufdone_meta_buf */);
6243 } else { /* mBatchSize = 0 */
6244 hdrPlusPerfLock(metadata_buf);
6245 pthread_mutex_lock(&mMutex);
6246 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006247 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006248 true /* last urgent frame of batch metadata */,
6249 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006250 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 pthread_mutex_unlock(&mMutex);
6252 }
6253 } else if (isInputBuffer) {
6254 pthread_mutex_lock(&mMutex);
6255 handleInputBufferWithLock(frame_number);
6256 pthread_mutex_unlock(&mMutex);
6257 } else {
6258 pthread_mutex_lock(&mMutex);
6259 handleBufferWithLock(buffer, frame_number);
6260 pthread_mutex_unlock(&mMutex);
6261 }
6262 return;
6263}
6264
6265/*===========================================================================
6266 * FUNCTION : getReprocessibleOutputStreamId
6267 *
 6268 * DESCRIPTION: Get the source output stream id for the input reprocess stream,
 6269 *              i.e. an output or bidirectional stream whose size and format
 6270 *              match the configured input stream, if an input stream exists.
6271 *
6272 * PARAMETERS :
6273 * @id : return the stream id if found
6274 *
6275 * RETURN : int32_t type of status
6276 * NO_ERROR -- success
 6277 *              non-zero failure code
6278 *==========================================================================*/
6279int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6280{
 6281    /* Check whether any output or bidirectional stream has the same size and
 6282       format as the input stream, and return that stream */
6283 if ((mInputStreamInfo.dim.width > 0) &&
6284 (mInputStreamInfo.dim.height > 0)) {
6285 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6286 it != mStreamInfo.end(); it++) {
6287
6288 camera3_stream_t *stream = (*it)->stream;
6289 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6290 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6291 (stream->format == mInputStreamInfo.format)) {
6292 // Usage flag for an input stream and the source output stream
6293 // may be different.
6294 LOGD("Found reprocessible output stream! %p", *it);
6295 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6296 stream->usage, mInputStreamInfo.usage);
6297
6298 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6299 if (channel != NULL && channel->mStreams[0]) {
6300 id = channel->mStreams[0]->getMyServerID();
6301 return NO_ERROR;
6302 }
6303 }
6304 }
6305 } else {
6306 LOGD("No input stream, so no reprocessible output stream");
6307 }
6308 return NAME_NOT_FOUND;
6309}
6310
6311/*===========================================================================
6312 * FUNCTION : lookupFwkName
6313 *
 6314 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6315 *              make sure the parameter is correctly propagated
6316 *
6317 * PARAMETERS :
6318 * @arr : map between the two enums
6319 * @len : len of the map
6320 * @hal_name : name of the hal_parm to map
6321 *
6322 * RETURN : int type of status
6323 * fwk_name -- success
 6324 *              non-zero failure code
6325 *==========================================================================*/
6326template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6327 size_t len, halType hal_name)
6328{
6329
6330 for (size_t i = 0; i < len; i++) {
6331 if (arr[i].hal_name == hal_name) {
6332 return arr[i].fwk_name;
6333 }
6334 }
6335
 6336    /* Not being able to find a matching framework type is not necessarily
6337 * an error case. This happens when mm-camera supports more attributes
6338 * than the frameworks do */
6339 LOGH("Cannot find matching framework type");
6340 return NAME_NOT_FOUND;
6341}
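// Illustrative use (map and macro names assumed from elsewhere in this file):
//   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffectMode);
//   if (fwkEffect != NAME_NOT_FOUND) { /* publish fwkEffect to the framework */ }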
6342
6343/*===========================================================================
6344 * FUNCTION : lookupHalName
6345 *
 6346 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6347 *              make sure the parameter is correctly propagated
6348 *
6349 * PARAMETERS :
6350 * @arr : map between the two enums
6351 * @len : len of the map
 6352 *   @fwk_name : name of the framework parameter to map
 6353 *
 6354 * RETURN     : int32_t type of status
 6355 *              hal_name -- success
 6356 *              non-zero failure code
6357 *==========================================================================*/
6358template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6359 size_t len, fwkType fwk_name)
6360{
6361 for (size_t i = 0; i < len; i++) {
6362 if (arr[i].fwk_name == fwk_name) {
6363 return arr[i].hal_name;
6364 }
6365 }
6366
6367 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6368 return NAME_NOT_FOUND;
6369}
6370
6371/*===========================================================================
6372 * FUNCTION : lookupProp
6373 *
6374 * DESCRIPTION: lookup a value by its name
6375 *
6376 * PARAMETERS :
6377 * @arr : map between the two enums
6378 * @len : size of the map
6379 * @name : name to be looked up
6380 *
6381 * RETURN : Value if found
6382 * CAM_CDS_MODE_MAX if not found
6383 *==========================================================================*/
6384template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6385 size_t len, const char *name)
6386{
6387 if (name) {
6388 for (size_t i = 0; i < len; i++) {
6389 if (!strcmp(arr[i].desc, name)) {
6390 return arr[i].val;
6391 }
6392 }
6393 }
6394 return CAM_CDS_MODE_MAX;
6395}
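// Illustrative use (property and map names assumed): map a persist property string
// onto its CDS enum value, keeping the default when the string is not recognized:
//   cam_cds_mode_type_t cds = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//   if (cds == CAM_CDS_MODE_MAX) { /* property not recognized, keep default */ }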
6396
6397/*===========================================================================
 6398 * FUNCTION   : translateFromHalMetadata
 6399 * DESCRIPTION: Translate HAL backend metadata into framework camera_metadata_t format
6400 *
6401 * PARAMETERS :
6402 * @metadata : metadata information from callback
6403 * @timestamp: metadata buffer timestamp
6404 * @request_id: request id
6405 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006406 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006407 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6408 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006409 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006410 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6411 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006412 *
6413 * RETURN : camera_metadata_t*
6414 * metadata in a format specified by fwk
6415 *==========================================================================*/
6416camera_metadata_t*
6417QCamera3HardwareInterface::translateFromHalMetadata(
6418 metadata_buffer_t *metadata,
6419 nsecs_t timestamp,
6420 int32_t request_id,
6421 const CameraMetadata& jpegMetadata,
6422 uint8_t pipeline_depth,
6423 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006424 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006425 /* DevCamDebug metadata translateFromHalMetadata argument */
6426 uint8_t DevCamDebug_meta_enable,
6427 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006428 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006429 uint8_t fwk_cacMode,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006430 bool lastMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07006431{
6432 CameraMetadata camMetadata;
6433 camera_metadata_t *resultMetadata;
6434
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006435 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006436 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6437 * Timestamp is needed because it's used for shutter notify calculation.
6438 * */
6439 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6440 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006441 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006442 }
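    // For HFR batches, every metadata except the last one in the batch is thus reduced
    // to just the sensor timestamp above; the full translation below runs only for the
    // last metadata of each batch (and for every frame in non-batch mode).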
6443
Thierry Strudel3d639192016-09-09 11:52:26 -07006444 if (jpegMetadata.entryCount())
6445 camMetadata.append(jpegMetadata);
6446
6447 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6448 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6449 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6450 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006451 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006452 if (mBatchSize == 0) {
6453 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6454 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6455 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006456
Samuel Ha68ba5172016-12-15 18:41:12 -08006457 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6458    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6459 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
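        // Each IF_META_AVAILABLE block below follows the same pattern: if the backend
        // populated the corresponding CAM_INTF_META_DEV_CAM_* entry, copy its value into
        // camMetadata under the matching DEVCAMDEBUG_* vendor tag.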
6460 // DevCamDebug metadata translateFromHalMetadata AF
6461 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6462 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6463 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6464 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6465 }
6466 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6467 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6468 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6469 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6470 }
6471 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6472 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6473 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6474 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6475 }
6476 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6477 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6478 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6479 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6480 }
6481 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6482 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6483 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6484 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6485 }
6486 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6487 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6488 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6489 *DevCamDebug_af_monitor_pdaf_target_pos;
6490 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6491 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6492 }
6493 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6494 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6495 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6496 *DevCamDebug_af_monitor_pdaf_confidence;
6497 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6498 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6499 }
6500 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6501 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6502 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6503 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6504 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6505 }
6506 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6507 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6508 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6509 *DevCamDebug_af_monitor_tof_target_pos;
6510 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6511 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6512 }
6513 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6514 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6515 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6516 *DevCamDebug_af_monitor_tof_confidence;
6517 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6518 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6519 }
6520 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6521 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6522 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6523 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6524 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6525 }
6526 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6527 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6528 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6529 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6530 &fwk_DevCamDebug_af_monitor_type_select, 1);
6531 }
6532 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6533 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6534 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6535 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6536 &fwk_DevCamDebug_af_monitor_refocus, 1);
6537 }
6538 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6539 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6540 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6541 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6542 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6543 }
6544 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6545 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6546 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6547 *DevCamDebug_af_search_pdaf_target_pos;
6548 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6549 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6550 }
6551 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6552 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6553 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6554 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6555 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6556 }
6557 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6558 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6559 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6560 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6561 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6562 }
6563 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6564 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6565 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6566 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6567 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6568 }
6569 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6570 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6571 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6572 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6573 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6574 }
6575 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6576 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6577 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6578 *DevCamDebug_af_search_tof_target_pos;
6579 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6580 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6581 }
6582 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6583 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6584 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6585 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6586 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6587 }
6588 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6589 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6590 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6591 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6592 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6593 }
6594 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6595 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6596 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6597 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6598 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6599 }
6600 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6601 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6602 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6603 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6604 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6605 }
6606 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6607 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6608 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6609 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6610 &fwk_DevCamDebug_af_search_type_select, 1);
6611 }
6612 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6613 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6614 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6615 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6616 &fwk_DevCamDebug_af_search_next_pos, 1);
6617 }
6618 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6619 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6620 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6621 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6622 &fwk_DevCamDebug_af_search_target_pos, 1);
6623 }
6624 // DevCamDebug metadata translateFromHalMetadata AEC
6625 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6626 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6627 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6628 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6629 }
6630 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6631 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6632 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6633 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6634 }
6635 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6636 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6637 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6638 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6639 }
6640 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6641 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6642 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6643 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6644 }
6645 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6646 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6647 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6648 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6649 }
6650 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6651 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6652 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6653 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6654 }
6655 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6656 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6657 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6658 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6659 }
6660 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6661 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6662 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6663 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6664 }
Samuel Ha34229982017-02-17 13:51:11 -08006665 // DevCamDebug metadata translateFromHalMetadata zzHDR
6666 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6667 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6668 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6669 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6672 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006673 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006674 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6675 }
6676 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6677 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6678 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6679 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6682 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006683 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006684 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6685 }
6686 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6687 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6688 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6689 *DevCamDebug_aec_hdr_sensitivity_ratio;
6690 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6691 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6692 }
6693 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6694 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6695 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6696 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6697 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6698 }
6699 // DevCamDebug metadata translateFromHalMetadata ADRC
6700 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6701 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6702 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6703 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6704 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6705 }
6706 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6707 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6708 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6709 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6710 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6711 }
6712 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6713 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6714 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6715 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6716 }
6717 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6718 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6719 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6720 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6721 }
6722 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6723 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6724 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6725 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6726 }
6727 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6728 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6729 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6730 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6731 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006732 // DevCamDebug metadata translateFromHalMetadata AWB
6733 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6734 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6735 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6736 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6737 }
6738 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6739 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6740 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6741 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6742 }
6743 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6744 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6745 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6746 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6749 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6750 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6751 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6752 }
6753 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6754 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6755 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6756 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6757 }
6758 }
6759 // atrace_end(ATRACE_TAG_ALWAYS);
6760
Thierry Strudel3d639192016-09-09 11:52:26 -07006761 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6762 int64_t fwk_frame_number = *frame_number;
6763 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6764 }
6765
6766 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6767 int32_t fps_range[2];
6768 fps_range[0] = (int32_t)float_range->min_fps;
6769 fps_range[1] = (int32_t)float_range->max_fps;
6770 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6771 fps_range, 2);
6772 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6773 fps_range[0], fps_range[1]);
6774 }
6775
6776 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6777 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6778 }
6779
6780 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6781                int val = lookupFwkName(SCENE_MODES_MAP,
6782 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6783 *sceneMode);
6784 if (NAME_NOT_FOUND != val) {
6785 uint8_t fwkSceneMode = (uint8_t)val;
6786 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6787 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6788 fwkSceneMode);
6789 }
6790 }
6791
6792 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6793 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6794 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6795 }
6796
6797 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6798 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6799 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6800 }
6801
6802 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6803 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6804 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6805 }
6806
6807 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6808 CAM_INTF_META_EDGE_MODE, metadata) {
6809 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6810 }
6811
6812 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6813 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6814 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6815 }
6816
6817 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6818 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6819 }
6820
6821 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6822 if (0 <= *flashState) {
6823 uint8_t fwk_flashState = (uint8_t) *flashState;
6824 if (!gCamCapability[mCameraId]->flash_available) {
6825 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6826 }
6827 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6828 }
6829 }
6830
6831 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6832 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6833 if (NAME_NOT_FOUND != val) {
6834 uint8_t fwk_flashMode = (uint8_t)val;
6835 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6836 }
6837 }
6838
6839 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6840 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6841 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6842 }
6843
6844 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6845 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6846 }
6847
6848 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6849 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6850 }
6851
6852 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6853 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6854 }
6855
6856 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6857 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6858 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6859 }
6860
6861 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6862 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6863 LOGD("fwk_videoStab = %d", fwk_videoStab);
6864 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6865 } else {
6866        // Regardless of whether video stabilization is supported, CTS expects the EIS
6867        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
6868 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6869 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006870 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006871 }
6872
6873 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6874 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6875 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6876 }
6877
6878 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6879 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6880 }
6881
Thierry Strudel3d639192016-09-09 11:52:26 -07006882 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6883 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006884 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006885
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006886 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6887 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006888
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006889        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006890 blackLevelAppliedPattern->cam_black_level[0],
6891 blackLevelAppliedPattern->cam_black_level[1],
6892 blackLevelAppliedPattern->cam_black_level[2],
6893 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006894 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6895 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006896
6897#ifndef USE_HAL_3_3
6898 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306899        // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006900        // depth space.
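        // (Dropping from 14-bit to 10-bit depth corresponds to dividing by 2^(14-10) = 16.)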
Jason Lee4f3d96e2017-02-28 19:24:14 +05306901 fwk_blackLevelInd[0] /= 16.0;
6902 fwk_blackLevelInd[1] /= 16.0;
6903 fwk_blackLevelInd[2] /= 16.0;
6904 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006905 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6906 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006907#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006908 }
6909
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006910#ifndef USE_HAL_3_3
6911 // Fixed whitelevel is used by ISP/Sensor
6912 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6913 &gCamCapability[mCameraId]->white_level, 1);
6914#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006915
6916 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6917 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6918 int32_t scalerCropRegion[4];
6919 scalerCropRegion[0] = hScalerCropRegion->left;
6920 scalerCropRegion[1] = hScalerCropRegion->top;
6921 scalerCropRegion[2] = hScalerCropRegion->width;
6922 scalerCropRegion[3] = hScalerCropRegion->height;
6923
6924 // Adjust crop region from sensor output coordinate system to active
6925 // array coordinate system.
6926 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6927 scalerCropRegion[2], scalerCropRegion[3]);
6928
6929 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6930 }
6931
6932 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6933 LOGD("sensorExpTime = %lld", *sensorExpTime);
6934 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6935 }
6936
6937    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6938            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6939        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6940        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6941 }
6942
6943 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6944 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6945 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6946 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6947 sensorRollingShutterSkew, 1);
6948 }
6949
6950 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6951 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6952 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6953
6954 //calculate the noise profile based on sensitivity
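        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color channel,
        // interleaved as [S, O, S, O, ...]; under Android's sensor noise model the
        // noise standard deviation at signal level x is approximately sqrt(S*x + O).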
6955 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6956 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6957 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6958 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6959 noise_profile[i] = noise_profile_S;
6960 noise_profile[i+1] = noise_profile_O;
6961 }
6962 LOGD("noise model entry (S, O) is (%f, %f)",
6963 noise_profile_S, noise_profile_O);
6964 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6965 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6966 }
6967
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006968#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006969 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006970 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006971 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006972 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006973 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6974 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6975 }
6976 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
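    // The post-RAW sensitivity boost reported above uses ISO-like arithmetic units where
    // 100 means no digital boost; when both entries are present, the ISP sensitivity is
    // scaled by the post-stats factor before being reported.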
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006977#endif
6978
Thierry Strudel3d639192016-09-09 11:52:26 -07006979 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6980 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6981 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6982 }
6983
6984 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6985 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6986 *faceDetectMode);
6987 if (NAME_NOT_FOUND != val) {
6988 uint8_t fwk_faceDetectMode = (uint8_t)val;
6989 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6990
6991 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6992 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6993 CAM_INTF_META_FACE_DETECTION, metadata) {
6994 uint8_t numFaces = MIN(
6995 faceDetectionInfo->num_faces_detected, MAX_ROI);
6996 int32_t faceIds[MAX_ROI];
6997 uint8_t faceScores[MAX_ROI];
6998 int32_t faceRectangles[MAX_ROI * 4];
6999 int32_t faceLandmarks[MAX_ROI * 6];
7000 size_t j = 0, k = 0;
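                    // Per the ANDROID_STATISTICS_FACE_* layouts, each face contributes
                    // 4 rectangle values and 6 landmark values (left eye x/y, right eye
                    // x/y, mouth x/y); j and k index into those flat arrays.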
7001
7002 for (size_t i = 0; i < numFaces; i++) {
7003 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7004 // Adjust crop region from sensor output coordinate system to active
7005 // array coordinate system.
7006 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7007 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7008 rect.width, rect.height);
7009
7010 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7011 faceRectangles+j, -1);
7012
7013 j+= 4;
7014 }
7015 if (numFaces <= 0) {
7016 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7017 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7018 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7019 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7020 }
7021
7022 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7023 numFaces);
7024 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7025 faceRectangles, numFaces * 4U);
7026 if (fwk_faceDetectMode ==
7027 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7028 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7029 CAM_INTF_META_FACE_LANDMARK, metadata) {
7030
7031 for (size_t i = 0; i < numFaces; i++) {
7032 // Map the co-ordinate sensor output coordinate system to active
7033 // array coordinate system.
7034 mCropRegionMapper.toActiveArray(
7035 landmarks->face_landmarks[i].left_eye_center.x,
7036 landmarks->face_landmarks[i].left_eye_center.y);
7037 mCropRegionMapper.toActiveArray(
7038 landmarks->face_landmarks[i].right_eye_center.x,
7039 landmarks->face_landmarks[i].right_eye_center.y);
7040 mCropRegionMapper.toActiveArray(
7041 landmarks->face_landmarks[i].mouth_center.x,
7042 landmarks->face_landmarks[i].mouth_center.y);
7043
7044 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007045 k+= TOTAL_LANDMARK_INDICES;
7046 }
7047 } else {
7048 for (size_t i = 0; i < numFaces; i++) {
7049 setInvalidLandmarks(faceLandmarks+k);
7050 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007051 }
7052 }
7053
7054 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7055 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7056 faceLandmarks, numFaces * 6U);
7057 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007058 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7059 CAM_INTF_META_FACE_BLINK, metadata) {
7060 uint8_t detected[MAX_ROI];
7061 uint8_t degree[MAX_ROI * 2];
7062 for (size_t i = 0; i < numFaces; i++) {
7063 detected[i] = blinks->blink[i].blink_detected;
7064 degree[2 * i] = blinks->blink[i].left_blink;
7065 degree[2 * i + 1] = blinks->blink[i].right_blink;
7066 }
7067 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7068 detected, numFaces);
7069 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7070 degree, numFaces * 2);
7071 }
7072 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7073 CAM_INTF_META_FACE_SMILE, metadata) {
7074 uint8_t degree[MAX_ROI];
7075 uint8_t confidence[MAX_ROI];
7076 for (size_t i = 0; i < numFaces; i++) {
7077 degree[i] = smiles->smile[i].smile_degree;
7078 confidence[i] = smiles->smile[i].smile_confidence;
7079 }
7080 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7081 degree, numFaces);
7082 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7083 confidence, numFaces);
7084 }
7085 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7086 CAM_INTF_META_FACE_GAZE, metadata) {
7087 int8_t angle[MAX_ROI];
7088 int32_t direction[MAX_ROI * 3];
7089 int8_t degree[MAX_ROI * 2];
7090 for (size_t i = 0; i < numFaces; i++) {
7091 angle[i] = gazes->gaze[i].gaze_angle;
7092 direction[3 * i] = gazes->gaze[i].updown_dir;
7093 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7094 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7095 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7096 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7097 }
7098 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7099 (uint8_t *)angle, numFaces);
7100 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7101 direction, numFaces * 3);
7102 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7103 (uint8_t *)degree, numFaces * 2);
7104 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007105 }
7106 }
7107 }
7108 }
7109
7110 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7111 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007112 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007113 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007114 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007115
Shuzhen Wang14415f52016-11-16 18:26:18 -08007116 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7117 histogramBins = *histBins;
7118 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7119 }
7120
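            // Only one channel's histogram is forwarded: for Bayer stats the GR/GB/B
            // buffers are used when explicitly selected, and Y/ALL/R (and anything else)
            // fall back to the R-channel buffer.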
7121 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007122 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7123 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007124 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007125
7126 switch (stats_data->type) {
7127 case CAM_HISTOGRAM_TYPE_BAYER:
7128 switch (stats_data->bayer_stats.data_type) {
7129 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007130 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7131 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007133 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7134 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007135 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007136 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7137 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007138 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007139 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007140 case CAM_STATS_CHANNEL_R:
7141 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007142 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7143 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007144 }
7145 break;
7146 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007147 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007148 break;
7149 }
7150
Shuzhen Wang14415f52016-11-16 18:26:18 -08007151 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007152 }
7153 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007154 }
7155
7156 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7157 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7158 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7159 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7160 }
7161
7162 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7163 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7164 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7165 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7166 }
7167
7168 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7169 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7170 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7171 CAM_MAX_SHADING_MAP_HEIGHT);
7172 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7173 CAM_MAX_SHADING_MAP_WIDTH);
7174 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7175 lensShadingMap->lens_shading, 4U * map_width * map_height);
7176 }
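    // The lens shading map carries four gain factors (one per Bayer channel) for each
    // grid cell, hence the 4 * map_width * map_height element count reported above.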
7177
7178 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7179 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7180 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7181 }
7182
7183 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7184 //Populate CAM_INTF_META_TONEMAP_CURVES
7185        /* ch0 = G, ch1 = B, ch2 = R */
7186 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7187 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7188 tonemap->tonemap_points_cnt,
7189 CAM_MAX_TONEMAP_CURVE_SIZE);
7190 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7191 }
7192
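        // Each channel's curve is a list of (Pin, Pout) control points, hence the
        // element count of tonemap_points_cnt * 2 per color channel below.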
7193 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7194 &tonemap->curves[0].tonemap_points[0][0],
7195 tonemap->tonemap_points_cnt * 2);
7196
7197 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7198 &tonemap->curves[1].tonemap_points[0][0],
7199 tonemap->tonemap_points_cnt * 2);
7200
7201 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7202 &tonemap->curves[2].tonemap_points[0][0],
7203 tonemap->tonemap_points_cnt * 2);
7204 }
7205
7206 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7207 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7208 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7209 CC_GAIN_MAX);
7210 }
7211
7212 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7213 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7214 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7215 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7216 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7217 }
7218
7219 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7220 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7221 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7222 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7223 toneCurve->tonemap_points_cnt,
7224 CAM_MAX_TONEMAP_CURVE_SIZE);
7225 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7226 }
7227 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7228 (float*)toneCurve->curve.tonemap_points,
7229 toneCurve->tonemap_points_cnt * 2);
7230 }
7231
7232 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7233 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7234 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7235 predColorCorrectionGains->gains, 4);
7236 }
7237
7238 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7239 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7240 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7241 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7242 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7243 }
7244
7245 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7246 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7247 }
7248
7249 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7250 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7251 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7252 }
7253
7254 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7255 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7256 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7257 }
7258
7259 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7260 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7261 *effectMode);
7262 if (NAME_NOT_FOUND != val) {
7263 uint8_t fwk_effectMode = (uint8_t)val;
7264 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7265 }
7266 }
7267
7268 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7269 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7270 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7271 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7272 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7273 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7274 }
7275 int32_t fwk_testPatternData[4];
7276 fwk_testPatternData[0] = testPatternData->r;
7277 fwk_testPatternData[3] = testPatternData->b;
7278 switch (gCamCapability[mCameraId]->color_arrangement) {
7279 case CAM_FILTER_ARRANGEMENT_RGGB:
7280 case CAM_FILTER_ARRANGEMENT_GRBG:
7281 fwk_testPatternData[1] = testPatternData->gr;
7282 fwk_testPatternData[2] = testPatternData->gb;
7283 break;
7284 case CAM_FILTER_ARRANGEMENT_GBRG:
7285 case CAM_FILTER_ARRANGEMENT_BGGR:
7286 fwk_testPatternData[2] = testPatternData->gr;
7287 fwk_testPatternData[1] = testPatternData->gb;
7288 break;
7289 default:
7290 LOGE("color arrangement %d is not supported",
7291 gCamCapability[mCameraId]->color_arrangement);
7292 break;
7293 }
7294 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7295 }
7296
7297 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7298 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7299 }
7300
7301 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7302 String8 str((const char *)gps_methods);
7303 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7304 }
7305
7306 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7307 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7308 }
7309
7310 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7311 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7312 }
7313
7314 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7315 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7316 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7317 }
7318
7319 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7320 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7321 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7322 }
7323
7324 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7325 int32_t fwk_thumb_size[2];
7326 fwk_thumb_size[0] = thumb_size->width;
7327 fwk_thumb_size[1] = thumb_size->height;
7328 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7329 }
7330
7331 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7332 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7333 privateData,
7334 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7335 }
7336
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007337 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007338 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007339 meteringMode, 1);
7340 }
7341
Thierry Strudel54dc9782017-02-15 12:12:10 -08007342 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7343 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7344 LOGD("hdr_scene_data: %d %f\n",
7345 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7346 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7347 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7348 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7349 &isHdr, 1);
7350 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7351 &isHdrConfidence, 1);
7352 }
7353
7354
7355
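    // The tuning blob below is serialized as a fixed header of six uint32_t fields
    // (data version plus the sensor/VFE/CPP/CAC/mod3 payload sizes) followed by the
    // sensor, VFE, CPP and CAC payloads back to back; the length passed to update()
    // is expressed in uint32_t words. Presumably the consumer of
    // QCAMERA3_TUNING_META_DATA_BLOB parses it with this same layout.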
Thierry Strudel3d639192016-09-09 11:52:26 -07007356 if (metadata->is_tuning_params_valid) {
7357 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7358 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7359 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7360
7361
7362 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7363 sizeof(uint32_t));
7364 data += sizeof(uint32_t);
7365
7366 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7367 sizeof(uint32_t));
7368 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7369 data += sizeof(uint32_t);
7370
7371 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7372 sizeof(uint32_t));
7373 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7374 data += sizeof(uint32_t);
7375
7376 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7377 sizeof(uint32_t));
7378 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7379 data += sizeof(uint32_t);
7380
7381 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7382 sizeof(uint32_t));
7383 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7384 data += sizeof(uint32_t);
7385
7386 metadata->tuning_params.tuning_mod3_data_size = 0;
7387 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7388 sizeof(uint32_t));
7389 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7390 data += sizeof(uint32_t);
7391
7392 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7393 TUNING_SENSOR_DATA_MAX);
7394 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7395 count);
7396 data += count;
7397
7398 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7399 TUNING_VFE_DATA_MAX);
7400 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7401 count);
7402 data += count;
7403
7404 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7405 TUNING_CPP_DATA_MAX);
7406 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7407 count);
7408 data += count;
7409
7410 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7411 TUNING_CAC_DATA_MAX);
7412 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7413 count);
7414 data += count;
7415
7416 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7417 (int32_t *)(void *)tuning_meta_data_blob,
7418 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7419 }
7420
7421 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7422 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7423 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7424 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7425 NEUTRAL_COL_POINTS);
7426 }
7427
7428 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7429 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7430 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7431 }
7432
7433 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7434 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7435 // Adjust crop region from sensor output coordinate system to active
7436 // array coordinate system.
7437 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7438 hAeRegions->rect.width, hAeRegions->rect.height);
7439
7440 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7441 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7442 REGIONS_TUPLE_COUNT);
7443 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7444 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7445 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7446 hAeRegions->rect.height);
7447 }
7448
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007449 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7450 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7451 if (NAME_NOT_FOUND != val) {
7452 uint8_t fwkAfMode = (uint8_t)val;
7453 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7454 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7455 } else {
7456 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7457 val);
7458 }
7459 }
7460
Thierry Strudel3d639192016-09-09 11:52:26 -07007461 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7462 uint8_t fwk_afState = (uint8_t) *afState;
7463 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007464 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007465 }
7466
7467 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7468 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7469 }
7470
7471 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7472 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7473 }
7474
7475 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7476 uint8_t fwk_lensState = *lensState;
7477 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7478 }
7479
Thierry Strudel3d639192016-09-09 11:52:26 -07007480
7481 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007482 uint32_t ab_mode = *hal_ab_mode;
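        // The HAL's region-specific auto modes (AUTO_50HZ / AUTO_60HZ) have no direct
        // framework equivalent, so collapse them to plain AUTO before mapping to
        // ANDROID_CONTROL_AE_ANTIBANDING_MODE.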
7483 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7484 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7485 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7486 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007487 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007488 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007489 if (NAME_NOT_FOUND != val) {
7490 uint8_t fwk_ab_mode = (uint8_t)val;
7491 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7492 }
7493 }
7494
7495 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7496 int val = lookupFwkName(SCENE_MODES_MAP,
7497 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7498 if (NAME_NOT_FOUND != val) {
7499 uint8_t fwkBestshotMode = (uint8_t)val;
7500 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7501 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7502 } else {
7503 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7504 }
7505 }
7506
7507 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7508 uint8_t fwk_mode = (uint8_t) *mode;
7509 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7510 }
7511
7512    /* Constant metadata values to be updated */
7513 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7514 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7515
7516 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7517 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7518
7519 int32_t hotPixelMap[2];
7520 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7521
7522 // CDS
7523 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7524 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7525 }
7526
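    // The video HDR, IR and TNR blocks below share a pattern: mCurrFeatureState caches
    // the last reported on/off state so that each transition is logged exactly once
    // (PROFILE_META_*_TOGGLED) before the corresponding vendor tag is updated.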
Thierry Strudel04e026f2016-10-10 11:27:36 -07007527 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7528 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007529 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007530 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7531 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7532 } else {
7533 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7534 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007535
7536 if(fwk_hdr != curr_hdr_state) {
7537 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7538 if(fwk_hdr)
7539 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7540 else
7541 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7542 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007543 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7544 }
7545
Thierry Strudel54dc9782017-02-15 12:12:10 -08007546 //binning correction
7547 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7548 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7549 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7550 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7551 }
7552
Thierry Strudel04e026f2016-10-10 11:27:36 -07007553 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007554 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007555 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7556 int8_t is_ir_on = 0;
7557
7558        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7559 if(is_ir_on != curr_ir_state) {
7560 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7561 if(is_ir_on)
7562 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7563 else
7564 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7565 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007566 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007567 }
7568
Thierry Strudel269c81a2016-10-12 12:13:59 -07007569 // AEC SPEED
7570 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7571 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7572 }
7573
7574 // AWB SPEED
7575 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7576 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7577 }
7578
Thierry Strudel3d639192016-09-09 11:52:26 -07007579 // TNR
7580 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7581 uint8_t tnr_enable = tnr->denoise_enable;
7582 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007583 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7584 int8_t is_tnr_on = 0;
7585
7586        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7587 if(is_tnr_on != curr_tnr_state) {
7588 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7589 if(is_tnr_on)
7590 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7591 else
7592 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007594
7595 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7596 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7597 }
7598
7599 // Reprocess crop data
7600 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7601 uint8_t cnt = crop_data->num_of_streams;
7602 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7603 // mm-qcamera-daemon only posts crop_data for streams
7604            // not linked to pproc, so the absence of valid crop metadata is not
7605            // necessarily an error case.
7606 LOGD("No valid crop metadata entries");
7607 } else {
7608 uint32_t reproc_stream_id;
7609 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7610 LOGD("No reprocessible stream found, ignore crop data");
7611 } else {
7612 int rc = NO_ERROR;
7613 Vector<int32_t> roi_map;
7614 int32_t *crop = new int32_t[cnt*4];
7615 if (NULL == crop) {
7616 rc = NO_MEMORY;
7617 }
7618 if (NO_ERROR == rc) {
7619 int32_t streams_found = 0;
7620 for (size_t i = 0; i < cnt; i++) {
7621 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7622 if (pprocDone) {
7623 // HAL already does internal reprocessing,
7624 // either via reprocessing before JPEG encoding,
7625 // or offline postprocessing for pproc bypass case.
7626 crop[0] = 0;
7627 crop[1] = 0;
7628 crop[2] = mInputStreamInfo.dim.width;
7629 crop[3] = mInputStreamInfo.dim.height;
7630 } else {
7631 crop[0] = crop_data->crop_info[i].crop.left;
7632 crop[1] = crop_data->crop_info[i].crop.top;
7633 crop[2] = crop_data->crop_info[i].crop.width;
7634 crop[3] = crop_data->crop_info[i].crop.height;
7635 }
7636 roi_map.add(crop_data->crop_info[i].roi_map.left);
7637 roi_map.add(crop_data->crop_info[i].roi_map.top);
7638 roi_map.add(crop_data->crop_info[i].roi_map.width);
7639 roi_map.add(crop_data->crop_info[i].roi_map.height);
7640 streams_found++;
7641 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7642 crop[0], crop[1], crop[2], crop[3]);
7643 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7644 crop_data->crop_info[i].roi_map.left,
7645 crop_data->crop_info[i].roi_map.top,
7646 crop_data->crop_info[i].roi_map.width,
7647 crop_data->crop_info[i].roi_map.height);
7648 break;
7649
7650 }
7651 }
7652 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7653 &streams_found, 1);
7654 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7655 crop, (size_t)(streams_found * 4));
7656 if (roi_map.array()) {
7657 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7658 roi_map.array(), roi_map.size());
7659 }
7660 }
7661 if (crop) {
7662 delete [] crop;
7663 }
7664 }
7665 }
7666 }
7667
7668 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7669        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7670        // non-NULL, so hardcode the CAC result to OFF mode.
7671 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7672 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7673 } else {
7674 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7675 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7676 *cacMode);
7677 if (NAME_NOT_FOUND != val) {
7678 uint8_t resultCacMode = (uint8_t)val;
7679                // Check whether the CAC result from the callback matches the framework-set
7680                // CAC mode; if not, report the CAC mode that came in the corresponding request.
7681 if (fwk_cacMode != resultCacMode) {
7682 resultCacMode = fwk_cacMode;
7683 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007684 //Check if CAC is disabled by property
7685 if (m_cacModeDisabled) {
7686 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7687 }
7688
Thierry Strudel3d639192016-09-09 11:52:26 -07007689 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7690 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7691 } else {
7692 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7693 }
7694 }
7695 }
7696
7697 // Post blob of cam_cds_data through vendor tag.
7698 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7699 uint8_t cnt = cdsInfo->num_of_streams;
7700 cam_cds_data_t cdsDataOverride;
7701 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7702 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7703 cdsDataOverride.num_of_streams = 1;
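        // Only a single stream entry is reported: if a reprocessible output stream
        // exists, its cds_enable value is copied into slot 0 below.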
7704 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7705 uint32_t reproc_stream_id;
7706 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7707 LOGD("No reprocessible stream found, ignore cds data");
7708 } else {
7709 for (size_t i = 0; i < cnt; i++) {
7710 if (cdsInfo->cds_info[i].stream_id ==
7711 reproc_stream_id) {
7712 cdsDataOverride.cds_info[0].cds_enable =
7713 cdsInfo->cds_info[i].cds_enable;
7714 break;
7715 }
7716 }
7717 }
7718 } else {
7719 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7720 }
7721 camMetadata.update(QCAMERA3_CDS_INFO,
7722 (uint8_t *)&cdsDataOverride,
7723 sizeof(cam_cds_data_t));
7724 }
7725
7726 // Ldaf calibration data
7727 if (!mLdafCalibExist) {
7728 IF_META_AVAILABLE(uint32_t, ldafCalib,
7729 CAM_INTF_META_LDAF_EXIF, metadata) {
7730 mLdafCalibExist = true;
7731 mLdafCalib[0] = ldafCalib[0];
7732 mLdafCalib[1] = ldafCalib[1];
7733 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7734 ldafCalib[0], ldafCalib[1]);
7735 }
7736 }
7737
Thierry Strudel54dc9782017-02-15 12:12:10 -08007738 // EXIF debug data through vendor tag
7739 /*
7740 * Mobicat Mask can assume 3 values:
7741 * 1 refers to Mobicat data,
7742 * 2 refers to Stats Debug and Exif Debug Data
7743 * 3 refers to Mobicat and Stats Debug Data
7744 * We want to make sure that we are sending Exif debug data
7745 * only when Mobicat Mask is 2.
7746 */
7747 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7748 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7749 (uint8_t *)(void *)mExifParams.debug_params,
7750 sizeof(mm_jpeg_debug_exif_params_t));
7751 }
7752
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007753 // Reprocess and DDM debug data through vendor tag
7754 cam_reprocess_info_t repro_info;
7755 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007756 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7757 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007758 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007759 }
7760 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7761 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007762 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007763 }
7764 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7765 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007766 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007767 }
7768 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7769 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007770 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007771 }
7772 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7773 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007774 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007775 }
7776 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007777 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007778 }
7779 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7780 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007781 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007782 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007783 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7784 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7785 }
7786 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7787 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7788 }
7789 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7790 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007791
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007792 // INSTANT AEC MODE
7793 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7794 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7795 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7796 }
7797
Shuzhen Wange763e802016-03-31 10:24:29 -07007798 // AF scene change
7799 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7800 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7801 }
7802
Thierry Strudel3d639192016-09-09 11:52:26 -07007803 resultMetadata = camMetadata.release();
7804 return resultMetadata;
7805}
7806
7807/*===========================================================================
7808 * FUNCTION : saveExifParams
7809 *
7810 * DESCRIPTION:
7811 *
7812 * PARAMETERS :
7813 * @metadata : metadata information from callback
7814 *
7815 * RETURN : none
7816 *
7817 *==========================================================================*/
7818void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7819{
7820 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7821 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7822 if (mExifParams.debug_params) {
7823 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7824 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7825 }
7826 }
7827 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7828 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7829 if (mExifParams.debug_params) {
7830 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7831 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7832 }
7833 }
7834 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7835 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7836 if (mExifParams.debug_params) {
7837 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7838 mExifParams.debug_params->af_debug_params_valid = TRUE;
7839 }
7840 }
7841 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7842 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7843 if (mExifParams.debug_params) {
7844 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7845 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7846 }
7847 }
7848 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7849 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7850 if (mExifParams.debug_params) {
7851 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7852 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7853 }
7854 }
7855 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7856 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7857 if (mExifParams.debug_params) {
7858 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7859 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7860 }
7861 }
7862 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7863 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7864 if (mExifParams.debug_params) {
7865 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7866 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7867 }
7868 }
7869 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7870 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7871 if (mExifParams.debug_params) {
7872 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7873 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7874 }
7875 }
7876}
7877
7878/*===========================================================================
7879 * FUNCTION : get3AExifParams
7880 *
7881 * DESCRIPTION:
7882 *
7883 * PARAMETERS : none
7884 *
7885 *
7886 * RETURN : mm_jpeg_exif_params_t
7887 *
7888 *==========================================================================*/
7889mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7890{
7891 return mExifParams;
7892}
7893
7894/*===========================================================================
7895 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7896 *
7897 * DESCRIPTION:
7898 *
7899 * PARAMETERS :
7900 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007901 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7902 * urgent metadata in a batch. Always true for
7903 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007904 *
7905 * RETURN : camera_metadata_t*
7906 * metadata in a format specified by fwk
7907 *==========================================================================*/
7908camera_metadata_t*
7909QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007910 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07007911{
7912 CameraMetadata camMetadata;
7913 camera_metadata_t *resultMetadata;
7914
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007915 if (!lastUrgentMetadataInBatch) {
7916 /* In batch mode, use empty metadata if this is not the last in batch
7917 */
7918 resultMetadata = allocate_camera_metadata(0, 0);
7919 return resultMetadata;
7920 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007921
7922 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7923 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7924 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7925 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7926 }
7927
7928 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7929 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7930 &aecTrigger->trigger, 1);
7931 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7932 &aecTrigger->trigger_id, 1);
7933 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7934 aecTrigger->trigger);
7935 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7936 aecTrigger->trigger_id);
7937 }
7938
7939 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7940 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7941 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7942 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7943 }
7944
Thierry Strudel3d639192016-09-09 11:52:26 -07007945 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7946 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7947 &af_trigger->trigger, 1);
7948 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7949 af_trigger->trigger);
7950 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7951 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7952 af_trigger->trigger_id);
7953 }
7954
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07007955 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7956 /*af regions*/
7957 int32_t afRegions[REGIONS_TUPLE_COUNT];
7958 // Adjust crop region from sensor output coordinate system to active
7959 // array coordinate system.
7960 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7961 hAfRegions->rect.width, hAfRegions->rect.height);
7962
7963 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7964 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7965 REGIONS_TUPLE_COUNT);
7966 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7967 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7968 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7969 hAfRegions->rect.height);
7970 }
7971
Shuzhen Wangcc386c52017-03-29 09:28:08 -07007972 // AF region confidence
7973 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
7974 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
7975 }
7976
Thierry Strudel3d639192016-09-09 11:52:26 -07007977 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7978 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7979 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7980 if (NAME_NOT_FOUND != val) {
7981 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7982 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7983 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7984 } else {
7985 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7986 }
7987 }
7988
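    // Derive ANDROID_CONTROL_AE_MODE from the backend's redeye reduction setting,
    // LED flash mode and AEC mode: redeye reduction takes precedence, then the
    // auto/on flash modes, and finally plain AE on/off.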
7989 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7990 uint32_t aeMode = CAM_AE_MODE_MAX;
7991 int32_t flashMode = CAM_FLASH_MODE_MAX;
7992 int32_t redeye = -1;
7993 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7994 aeMode = *pAeMode;
7995 }
7996 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7997 flashMode = *pFlashMode;
7998 }
7999 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8000 redeye = *pRedeye;
8001 }
8002
8003 if (1 == redeye) {
8004 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8005 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8006 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8007 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8008 flashMode);
8009 if (NAME_NOT_FOUND != val) {
8010 fwk_aeMode = (uint8_t)val;
8011 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8012 } else {
8013 LOGE("Unsupported flash mode %d", flashMode);
8014 }
8015 } else if (aeMode == CAM_AE_MODE_ON) {
8016 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8017 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8018 } else if (aeMode == CAM_AE_MODE_OFF) {
8019 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8020 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8021 } else {
8022 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8023 "flashMode:%d, aeMode:%u!!!",
8024 redeye, flashMode, aeMode);
8025 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008026 if (mInstantAEC) {
8027        // Increment frame idx count until the bound is reached for instant AEC.
8028 mInstantAecFrameIdxCount++;
8029 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8030 CAM_INTF_META_AEC_INFO, metadata) {
8031 LOGH("ae_params->settled = %d",ae_params->settled);
8032 // If AEC settled, or if number of frames reached bound value,
8033 // should reset instant AEC.
8034 if (ae_params->settled ||
8035 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8036 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8037 mInstantAEC = false;
8038 mResetInstantAEC = true;
8039 mInstantAecFrameIdxCount = 0;
8040 }
8041 }
8042 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008043 resultMetadata = camMetadata.release();
8044 return resultMetadata;
8045}
8046
8047/*===========================================================================
8048 * FUNCTION : dumpMetadataToFile
8049 *
8050 * DESCRIPTION: Dumps tuning metadata to file system
8051 *
8052 * PARAMETERS :
8053 * @meta : tuning metadata
8054 * @dumpFrameCount : current dump frame count
8055 *   @enabled : Enable mask
 *   @type : metadata type string used in the dump file name
 *   @frameNumber : current frame number used in the dump file name
8056 *
8057 *==========================================================================*/
8058void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8059 uint32_t &dumpFrameCount,
8060 bool enabled,
8061 const char *type,
8062 uint32_t frameNumber)
8063{
8064 //Some sanity checks
8065 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8066 LOGE("Tuning sensor data size bigger than expected %d: %d",
8067 meta.tuning_sensor_data_size,
8068 TUNING_SENSOR_DATA_MAX);
8069 return;
8070 }
8071
8072 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8073 LOGE("Tuning VFE data size bigger than expected %d: %d",
8074 meta.tuning_vfe_data_size,
8075 TUNING_VFE_DATA_MAX);
8076 return;
8077 }
8078
8079 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8080 LOGE("Tuning CPP data size bigger than expected %d: %d",
8081 meta.tuning_cpp_data_size,
8082 TUNING_CPP_DATA_MAX);
8083 return;
8084 }
8085
8086 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8087 LOGE("Tuning CAC data size bigger than expected %d: %d",
8088 meta.tuning_cac_data_size,
8089 TUNING_CAC_DATA_MAX);
8090 return;
8091 }
8092 //
8093
8094 if(enabled){
8095 char timeBuf[FILENAME_MAX];
8096 char buf[FILENAME_MAX];
8097 memset(buf, 0, sizeof(buf));
8098 memset(timeBuf, 0, sizeof(timeBuf));
8099 time_t current_time;
8100 struct tm * timeinfo;
8101 time (&current_time);
8102 timeinfo = localtime (&current_time);
8103 if (timeinfo != NULL) {
8104 strftime (timeBuf, sizeof(timeBuf),
8105 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8106 }
8107 String8 filePath(timeBuf);
8108 snprintf(buf,
8109 sizeof(buf),
8110 "%dm_%s_%d.bin",
8111 dumpFrameCount,
8112 type,
8113 frameNumber);
8114 filePath.append(buf);
8115 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8116 if (file_fd >= 0) {
8117 ssize_t written_len = 0;
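            // Dump file layout: tuning_data_version followed by the five section
            // sizes (sensor, VFE, CPP, CAC, mod3), then the sensor/VFE/CPP/CAC
            // payloads copied from their fixed offsets within meta.data.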
8118 meta.tuning_data_version = TUNING_DATA_VERSION;
8119 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8120 written_len += write(file_fd, data, sizeof(uint32_t));
8121 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8122 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8123 written_len += write(file_fd, data, sizeof(uint32_t));
8124 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8125 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8126 written_len += write(file_fd, data, sizeof(uint32_t));
8127 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8128 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8129 written_len += write(file_fd, data, sizeof(uint32_t));
8130 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8131 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8132 written_len += write(file_fd, data, sizeof(uint32_t));
8133 meta.tuning_mod3_data_size = 0;
8134 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8135 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8136 written_len += write(file_fd, data, sizeof(uint32_t));
8137 size_t total_size = meta.tuning_sensor_data_size;
8138 data = (void *)((uint8_t *)&meta.data);
8139 written_len += write(file_fd, data, total_size);
8140 total_size = meta.tuning_vfe_data_size;
8141 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8142 written_len += write(file_fd, data, total_size);
8143 total_size = meta.tuning_cpp_data_size;
8144 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8145 written_len += write(file_fd, data, total_size);
8146 total_size = meta.tuning_cac_data_size;
8147 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8148 written_len += write(file_fd, data, total_size);
8149 close(file_fd);
8150        } else {
8151            LOGE("failed to open file for metadata dumping");
8152 }
8153 }
8154}
8155
8156/*===========================================================================
8157 * FUNCTION : cleanAndSortStreamInfo
8158 *
8159 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8160 *              and sort them such that raw streams are at the end of the list.
8161 *              This is a workaround for a camera daemon constraint.
8162 *
8163 * PARAMETERS : None
8164 *
8165 *==========================================================================*/
8166void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8167{
8168 List<stream_info_t *> newStreamInfo;
8169
8170 /*clean up invalid streams*/
8171 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8172 it != mStreamInfo.end();) {
8173 if(((*it)->status) == INVALID){
8174 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8175 delete channel;
8176 free(*it);
8177 it = mStreamInfo.erase(it);
8178 } else {
8179 it++;
8180 }
8181 }
8182
8183 // Move preview/video/callback/snapshot streams into newList
8184 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8185 it != mStreamInfo.end();) {
8186 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8187 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8188 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8189 newStreamInfo.push_back(*it);
8190 it = mStreamInfo.erase(it);
8191 } else
8192 it++;
8193 }
8194 // Move raw streams into newList
8195 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8196 it != mStreamInfo.end();) {
8197 newStreamInfo.push_back(*it);
8198 it = mStreamInfo.erase(it);
8199 }
8200
8201 mStreamInfo = newStreamInfo;
8202}
8203
8204/*===========================================================================
8205 * FUNCTION : extractJpegMetadata
8206 *
8207 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8208 * JPEG metadata is cached in HAL, and return as part of capture
8209 * result when metadata is returned from camera daemon.
8210 *
8211 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8212 * @request: capture request
8213 *
8214 *==========================================================================*/
8215void QCamera3HardwareInterface::extractJpegMetadata(
8216 CameraMetadata& jpegMetadata,
8217 const camera3_capture_request_t *request)
8218{
8219 CameraMetadata frame_settings;
8220 frame_settings = request->settings;
8221
8222 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8223 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8224 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8225 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8226
8227 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8228 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8229 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8230 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8231
8232 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8233 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8234 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8235 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8236
8237 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8238 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8239 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8240 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8241
8242 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8243 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8244 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8245 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8246
8247 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8248 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8249 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8250 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8251
8252 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8253 int32_t thumbnail_size[2];
8254 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8255 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8256 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8257 int32_t orientation =
8258 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008259 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008260 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8261 int32_t temp;
8262 temp = thumbnail_size[0];
8263 thumbnail_size[0] = thumbnail_size[1];
8264 thumbnail_size[1] = temp;
8265 }
8266 }
8267 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8268 thumbnail_size,
8269 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8270 }
8271
8272}
8273
8274/*===========================================================================
8275 * FUNCTION : convertToRegions
8276 *
8277 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8278 *
8279 * PARAMETERS :
8280 * @rect : cam_rect_t struct to convert
8281 * @region : int32_t destination array
8282 * @weight : if we are converting from cam_area_t, weight is valid
8283 * else weight = -1
8284 *
8285 *==========================================================================*/
8286void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8287 int32_t *region, int weight)
8288{
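    // Maps (left, top, width, height) to the framework's (xmin, ymin, xmax, ymax[, weight])
    // layout; e.g. a rect of {100, 200, 300, 400} with weight 1 becomes {100, 200, 400, 600, 1}.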
8289 region[0] = rect.left;
8290 region[1] = rect.top;
8291 region[2] = rect.left + rect.width;
8292 region[3] = rect.top + rect.height;
8293 if (weight > -1) {
8294 region[4] = weight;
8295 }
8296}
8297
8298/*===========================================================================
8299 * FUNCTION : convertFromRegions
8300 *
8301 * DESCRIPTION: helper method to convert from array to cam_rect_t
8302 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8303 *
8304 * PARAMETERS :
8305 *   @roi : cam_area_t destination struct
8306 *   @frame_settings : capture request settings containing the region tag
8307 *   @tag : metadata tag holding the (xmin, ymin, xmax, ymax, weight) array
8309 *==========================================================================*/
8310void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008311 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008312{
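    // Framework regions arrive as (xmin, ymin, xmax, ymax, weight); convert back to
    // the left/top/width/height representation used by cam_area_t.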
Thierry Strudel3d639192016-09-09 11:52:26 -07008313 int32_t x_min = frame_settings.find(tag).data.i32[0];
8314 int32_t y_min = frame_settings.find(tag).data.i32[1];
8315 int32_t x_max = frame_settings.find(tag).data.i32[2];
8316 int32_t y_max = frame_settings.find(tag).data.i32[3];
8317 roi.weight = frame_settings.find(tag).data.i32[4];
8318 roi.rect.left = x_min;
8319 roi.rect.top = y_min;
8320 roi.rect.width = x_max - x_min;
8321 roi.rect.height = y_max - y_min;
8322}
8323
8324/*===========================================================================
8325 * FUNCTION : resetIfNeededROI
8326 *
8327 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8328 * crop region
8329 *
8330 * PARAMETERS :
8331 * @roi : cam_area_t struct to resize
8332 * @scalerCropRegion : cam_crop_region_t region to compare against
8333 *
8334 *
8335 *==========================================================================*/
8336bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8337 const cam_crop_region_t* scalerCropRegion)
8338{
8339 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8340 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8341 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8342 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8343
8344    /* According to the spec, weight = 0 indicates that the roi needs to be disabled.
8345     * Without this check, the calculations below that validate whether the roi is
8346     * inside the scaler crop region would fail, the roi would not be reset, and the
8347     * algorithm would continue to use a stale roi window.
8348     */
8349 if (roi->weight == 0) {
8350 return true;
8351 }
8352
8353 if ((roi_x_max < scalerCropRegion->left) ||
8354        // right edge of roi window is left of scaler crop's left edge
8355 (roi_y_max < scalerCropRegion->top) ||
8356        // bottom edge of roi window is above scaler crop's top edge
8357 (roi->rect.left > crop_x_max) ||
8358        // left edge of roi window is beyond (to the right of) scaler crop's right edge
8359 (roi->rect.top > crop_y_max)){
8360        // top edge of roi window is below scaler crop's bottom edge
8361 return false;
8362 }
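    // Clamp the roi so it lies entirely within the scaler crop region.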
8363 if (roi->rect.left < scalerCropRegion->left) {
8364 roi->rect.left = scalerCropRegion->left;
8365 }
8366 if (roi->rect.top < scalerCropRegion->top) {
8367 roi->rect.top = scalerCropRegion->top;
8368 }
8369 if (roi_x_max > crop_x_max) {
8370 roi_x_max = crop_x_max;
8371 }
8372 if (roi_y_max > crop_y_max) {
8373 roi_y_max = crop_y_max;
8374 }
8375 roi->rect.width = roi_x_max - roi->rect.left;
8376 roi->rect.height = roi_y_max - roi->rect.top;
8377 return true;
8378}
8379
8380/*===========================================================================
8381 * FUNCTION : convertLandmarks
8382 *
8383 * DESCRIPTION: helper method to extract the landmarks from face detection info
8384 *
8385 * PARAMETERS :
8386 * @landmark_data : input landmark data to be converted
8387 * @landmarks : int32_t destination array
8388 *
8389 *
8390 *==========================================================================*/
8391void QCamera3HardwareInterface::convertLandmarks(
8392 cam_face_landmarks_info_t landmark_data,
8393 int32_t *landmarks)
8394{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008395 if (landmark_data.is_left_eye_valid) {
8396 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8397 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8398 } else {
8399 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8400 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8401 }
8402
8403 if (landmark_data.is_right_eye_valid) {
8404 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8405 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8406 } else {
8407 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8408 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8409 }
8410
8411 if (landmark_data.is_mouth_valid) {
8412 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8413 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8414 } else {
8415 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8416 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8417 }
8418}
8419
8420/*===========================================================================
8421 * FUNCTION : setInvalidLandmarks
8422 *
8423 * DESCRIPTION: helper method to set invalid landmarks
8424 *
8425 * PARAMETERS :
8426 * @landmarks : int32_t destination array
8427 *
8428 *
8429 *==========================================================================*/
8430void QCamera3HardwareInterface::setInvalidLandmarks(
8431 int32_t *landmarks)
8432{
8433 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8434 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8435 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8436 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8437 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8438 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008439}
8440
8441#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008442
8443/*===========================================================================
8444 * FUNCTION : getCapabilities
8445 *
8446 * DESCRIPTION: query camera capability from back-end
8447 *
8448 * PARAMETERS :
8449 * @ops : mm-interface ops structure
8450 * @cam_handle : camera handle for which we need capability
8451 *
8452 * RETURN : ptr type of capability structure
8453 * capability for success
8454 * NULL for failure
8455 *==========================================================================*/
8456cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8457 uint32_t cam_handle)
8458{
8459 int rc = NO_ERROR;
8460 QCamera3HeapMemory *capabilityHeap = NULL;
8461 cam_capability_t *cap_ptr = NULL;
8462
8463 if (ops == NULL) {
8464 LOGE("Invalid arguments");
8465 return NULL;
8466 }
8467
8468 capabilityHeap = new QCamera3HeapMemory(1);
8469 if (capabilityHeap == NULL) {
8470 LOGE("creation of capabilityHeap failed");
8471 return NULL;
8472 }
8473
8474 /* Allocate memory for capability buffer */
8475 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8476 if(rc != OK) {
8477        LOGE("No memory for capability");
8478 goto allocate_failed;
8479 }
8480
8481 /* Map memory for capability buffer */
8482 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8483
8484 rc = ops->map_buf(cam_handle,
8485 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8486 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8487 if(rc < 0) {
8488 LOGE("failed to map capability buffer");
8489 rc = FAILED_TRANSACTION;
8490 goto map_failed;
8491 }
8492
8493 /* Query Capability */
8494 rc = ops->query_capability(cam_handle);
8495 if(rc < 0) {
8496 LOGE("failed to query capability");
8497 rc = FAILED_TRANSACTION;
8498 goto query_failed;
8499 }
8500
8501 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8502 if (cap_ptr == NULL) {
8503 LOGE("out of memory");
8504 rc = NO_MEMORY;
8505 goto query_failed;
8506 }
8507
8508 memset(cap_ptr, 0, sizeof(cam_capability_t));
8509 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8510
8511 int index;
8512 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8513 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8514 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8515 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8516 }
8517
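/* Fall-through cleanup: unmap the capability buffer, release the heap, and return
 * either the copied capability structure or NULL on failure. */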
8518query_failed:
8519 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8520map_failed:
8521 capabilityHeap->deallocate();
8522allocate_failed:
8523 delete capabilityHeap;
8524
8525 if (rc != NO_ERROR) {
8526 return NULL;
8527 } else {
8528 return cap_ptr;
8529 }
8530}
8531
Thierry Strudel3d639192016-09-09 11:52:26 -07008532/*===========================================================================
8533 * FUNCTION : initCapabilities
8534 *
8535 * DESCRIPTION: initialize camera capabilities in static data struct
8536 *
8537 * PARAMETERS :
8538 * @cameraId : camera Id
8539 *
8540 * RETURN : int32_t type of status
8541 * NO_ERROR -- success
8542 *              non-zero failure code
8543 *==========================================================================*/
8544int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8545{
8546 int rc = 0;
8547 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008548 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008549
8550 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8551 if (rc) {
8552 LOGE("camera_open failed. rc = %d", rc);
8553 goto open_failed;
8554 }
8555 if (!cameraHandle) {
8556 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8557 goto open_failed;
8558 }
8559
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008560 handle = get_main_camera_handle(cameraHandle->camera_handle);
8561 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8562 if (gCamCapability[cameraId] == NULL) {
8563 rc = FAILED_TRANSACTION;
8564 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008565 }
8566
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008567 gCamCapability[cameraId]->camera_index = cameraId;
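    // For dual-camera sensors, also query and cache the aux camera capability
    // using the aux camera handle.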
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008568 if (is_dual_camera_by_idx(cameraId)) {
8569 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8570 gCamCapability[cameraId]->aux_cam_cap =
8571 getCapabilities(cameraHandle->ops, handle);
8572 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8573 rc = FAILED_TRANSACTION;
8574 free(gCamCapability[cameraId]);
8575 goto failed_op;
8576 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008577
8578 // Copy the main camera capability to main_cam_cap struct
8579 gCamCapability[cameraId]->main_cam_cap =
8580 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8581 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8582 LOGE("out of memory");
8583 rc = NO_MEMORY;
8584 goto failed_op;
8585 }
8586 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8587 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008588 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008589failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008590 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8591 cameraHandle = NULL;
8592open_failed:
8593 return rc;
8594}
8595
8596/*==========================================================================
8597 * FUNCTION : get3Aversion
8598 * FUNCTION   : get3AVersion
8599 * DESCRIPTION: get the Q3A S/W version
8600 *
8601 * PARAMETERS :
8602 * @sw_version: Reference of Q3A structure which will hold version info upon
8603 * return
8604 *
8605 * RETURN : None
8606 *
8607 *==========================================================================*/
8608void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8609{
8610 if(gCamCapability[mCameraId])
8611 sw_version = gCamCapability[mCameraId]->q3a_version;
8612 else
8613 LOGE("Capability structure NULL!");
8614}
8615
8616
8617/*===========================================================================
8618 * FUNCTION : initParameters
8619 *
8620 * DESCRIPTION: initialize camera parameters
8621 *
8622 * PARAMETERS :
8623 *
8624 * RETURN : int32_t type of status
8625 * NO_ERROR -- success
8626 *              non-zero failure code
8627 *==========================================================================*/
8628int QCamera3HardwareInterface::initParameters()
8629{
8630 int rc = 0;
8631
8632 //Allocate Set Param Buffer
8633 mParamHeap = new QCamera3HeapMemory(1);
8634 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8635 if(rc != OK) {
8636 rc = NO_MEMORY;
8637 LOGE("Failed to allocate SETPARM Heap memory");
8638 delete mParamHeap;
8639 mParamHeap = NULL;
8640 return rc;
8641 }
8642
8643 //Map memory for parameters buffer
8644 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8645 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8646 mParamHeap->getFd(0),
8647 sizeof(metadata_buffer_t),
8648 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8649 if(rc < 0) {
8650 LOGE("failed to map SETPARM buffer");
8651 rc = FAILED_TRANSACTION;
8652 mParamHeap->deallocate();
8653 delete mParamHeap;
8654 mParamHeap = NULL;
8655 return rc;
8656 }
8657
8658 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8659
8660 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8661 return rc;
8662}
8663
8664/*===========================================================================
8665 * FUNCTION : deinitParameters
8666 *
8667 * DESCRIPTION: de-initialize camera parameters
8668 *
8669 * PARAMETERS :
8670 *
8671 * RETURN : NONE
8672 *==========================================================================*/
8673void QCamera3HardwareInterface::deinitParameters()
8674{
8675 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8676 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8677
8678 mParamHeap->deallocate();
8679 delete mParamHeap;
8680 mParamHeap = NULL;
8681
8682 mParameters = NULL;
8683
8684 free(mPrevParameters);
8685 mPrevParameters = NULL;
8686}
8687
8688/*===========================================================================
8689 * FUNCTION : calcMaxJpegSize
8690 *
8691 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8692 *
8693 * PARAMETERS :
8694 *
8695 * RETURN : max_jpeg_size
8696 *==========================================================================*/
8697size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8698{
8699 size_t max_jpeg_size = 0;
8700 size_t temp_width, temp_height;
8701 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8702 MAX_SIZES_CNT);
8703 for (size_t i = 0; i < count; i++) {
8704 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8705 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8706 if (temp_width * temp_height > max_jpeg_size ) {
8707 max_jpeg_size = temp_width * temp_height;
8708 }
8709 }
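    // Worst-case JPEG size is estimated at 1.5 bytes per pixel, plus room for the
    // camera3_jpeg_blob_t transport struct appended at the end of the buffer.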
8710 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8711 return max_jpeg_size;
8712}
8713
8714/*===========================================================================
8715 * FUNCTION : getMaxRawSize
8716 *
8717 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8718 *
8719 * PARAMETERS :
8720 *
8721 * RETURN : Largest supported Raw Dimension
8722 *==========================================================================*/
8723cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8724{
8725 int max_width = 0;
8726 cam_dimension_t maxRawSize;
8727
8728 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8729 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8730 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8731 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8732 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8733 }
8734 }
8735 return maxRawSize;
8736}
8737
8738
8739/*===========================================================================
8740 * FUNCTION : calcMaxJpegDim
8741 *
8742 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8743 *
8744 * PARAMETERS :
8745 *
8746 * RETURN : max_jpeg_dim
8747 *==========================================================================*/
8748cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8749{
8750 cam_dimension_t max_jpeg_dim;
8751 cam_dimension_t curr_jpeg_dim;
8752 max_jpeg_dim.width = 0;
8753 max_jpeg_dim.height = 0;
8754 curr_jpeg_dim.width = 0;
8755 curr_jpeg_dim.height = 0;
8756 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8757 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8758 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8759 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8760 max_jpeg_dim.width * max_jpeg_dim.height ) {
8761 max_jpeg_dim.width = curr_jpeg_dim.width;
8762 max_jpeg_dim.height = curr_jpeg_dim.height;
8763 }
8764 }
8765 return max_jpeg_dim;
8766}
8767
8768/*===========================================================================
8769 * FUNCTION : addStreamConfig
8770 *
8771 * DESCRIPTION: adds the stream configuration to the array
8772 *
8773 * PARAMETERS :
8774 * @available_stream_configs : pointer to stream configuration array
8775 * @scalar_format : scalar format
8776 * @dim : configuration dimension
8777 * @config_type : input or output configuration type
8778 *
8779 * RETURN : NONE
8780 *==========================================================================*/
8781void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8782 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8783{
8784 available_stream_configs.add(scalar_format);
8785 available_stream_configs.add(dim.width);
8786 available_stream_configs.add(dim.height);
8787 available_stream_configs.add(config_type);
8788}
8789
8790/*===========================================================================
8791 * FUNCTION : suppportBurstCapture
8792 * FUNCTION   : supportBurstCapture
8793 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8794 *
8795 * PARAMETERS :
8796 * @cameraId : camera Id
8797 *
8798 * RETURN : true if camera supports BURST_CAPTURE
8799 * false otherwise
8800 *==========================================================================*/
8801bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8802{
8803 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8804 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8805 const int32_t highResWidth = 3264;
8806 const int32_t highResHeight = 2448;
8807
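    // BURST_CAPTURE requires full-resolution capture at >= 10 fps and, additionally,
    // >= 20 fps capture at an 8 MP (3264x2448) class resolution, or at full
    // resolution if the sensor is smaller than that.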
8808 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8809 // Maximum resolution images cannot be captured at >= 10fps
8810 // -> not supporting BURST_CAPTURE
8811 return false;
8812 }
8813
8814 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8815 // Maximum resolution images can be captured at >= 20fps
8816 // --> supporting BURST_CAPTURE
8817 return true;
8818 }
8819
8820 // Find the smallest highRes resolution, or largest resolution if there is none
8821 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8822 MAX_SIZES_CNT);
8823 size_t highRes = 0;
8824 while ((highRes + 1 < totalCnt) &&
8825 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8826 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8827 highResWidth * highResHeight)) {
8828 highRes++;
8829 }
8830 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8831 return true;
8832 } else {
8833 return false;
8834 }
8835}
8836
8837/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008838 * FUNCTION : getPDStatIndex
8839 *
8840 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8841 *
8842 * PARAMETERS :
8843 * @caps : camera capabilities
8844 *
8845 * RETURN : int32_t type
8846 * non-negative - on success
8847 * -1 - on failure
8848 *==========================================================================*/
8849int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8850 if (nullptr == caps) {
8851 return -1;
8852 }
8853
8854 uint32_t metaRawCount = caps->meta_raw_channel_count;
8855 int32_t ret = -1;
8856 for (size_t i = 0; i < metaRawCount; i++) {
8857 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8858 ret = i;
8859 break;
8860 }
8861 }
8862
8863 return ret;
8864}
8865
8866/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008867 * FUNCTION : initStaticMetadata
8868 *
8869 * DESCRIPTION: initialize the static metadata
8870 *
8871 * PARAMETERS :
8872 * @cameraId : camera Id
8873 *
8874 * RETURN : int32_t type of status
8875 * 0 -- success
8876 * non-zero failure code
8877 *==========================================================================*/
8878int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8879{
8880 int rc = 0;
8881 CameraMetadata staticInfo;
8882 size_t count = 0;
8883 bool limitedDevice = false;
8884 char prop[PROPERTY_VALUE_MAX];
8885 bool supportBurst = false;
8886
8887 supportBurst = supportBurstCapture(cameraId);
8888
8889    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
8890     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
8891     * advertised as a limited device */
8892 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8893 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8894 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8895 !supportBurst;
8896
8897 uint8_t supportedHwLvl = limitedDevice ?
8898 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008899#ifndef USE_HAL_3_3
8900 // LEVEL_3 - This device will support level 3.
8901 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8902#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008903 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008904#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008905
8906 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8907 &supportedHwLvl, 1);
8908
8909 bool facingBack = false;
8910 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8911 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8912 facingBack = true;
8913 }
8914 /*HAL 3 only*/
8915 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8916 &gCamCapability[cameraId]->min_focus_distance, 1);
8917
8918 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8919 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8920
8921 /*should be using focal lengths but sensor doesn't provide that info now*/
8922 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8923 &gCamCapability[cameraId]->focal_length,
8924 1);
8925
8926 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8927 gCamCapability[cameraId]->apertures,
8928 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8929
8930 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8931 gCamCapability[cameraId]->filter_densities,
8932 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8933
8934
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008935 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8936 size_t mode_count =
8937 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8938 for (size_t i = 0; i < mode_count; i++) {
8939 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8940 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008941 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08008942 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07008943
8944 int32_t lens_shading_map_size[] = {
8945 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8946 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8947 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8948 lens_shading_map_size,
8949 sizeof(lens_shading_map_size)/sizeof(int32_t));
8950
8951 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8952 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8953
8954 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8955 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8956
8957 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8958 &gCamCapability[cameraId]->max_frame_duration, 1);
8959
8960 camera_metadata_rational baseGainFactor = {
8961 gCamCapability[cameraId]->base_gain_factor.numerator,
8962 gCamCapability[cameraId]->base_gain_factor.denominator};
8963 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8964 &baseGainFactor, 1);
8965
8966 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8967 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8968
8969 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8970 gCamCapability[cameraId]->pixel_array_size.height};
8971 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8972 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8973
8974 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8975 gCamCapability[cameraId]->active_array_size.top,
8976 gCamCapability[cameraId]->active_array_size.width,
8977 gCamCapability[cameraId]->active_array_size.height};
8978 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8979 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8980
8981 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8982 &gCamCapability[cameraId]->white_level, 1);
8983
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008984 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8985 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8986 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008987 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008988 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008989
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008990#ifndef USE_HAL_3_3
8991 bool hasBlackRegions = false;
8992 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8993 LOGW("black_region_count: %d is bounded to %d",
8994 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8995 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8996 }
8997 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8998 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8999 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9000 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9001 }
9002 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9003 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9004 hasBlackRegions = true;
9005 }
9006#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009007 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9008 &gCamCapability[cameraId]->flash_charge_duration, 1);
9009
9010 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9011 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9012
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009013 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9014 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9015 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009016 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9017 &timestampSource, 1);
9018
Thierry Strudel54dc9782017-02-15 12:12:10 -08009019 //update histogram vendor data
9020 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009021 &gCamCapability[cameraId]->histogram_size, 1);
9022
Thierry Strudel54dc9782017-02-15 12:12:10 -08009023 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009024 &gCamCapability[cameraId]->max_histogram_count, 1);
9025
Shuzhen Wang14415f52016-11-16 18:26:18 -08009026 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9027 //so that app can request fewer number of bins than the maximum supported.
9028 std::vector<int32_t> histBins;
9029 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9030 histBins.push_back(maxHistBins);
9031 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9032 (maxHistBins & 0x1) == 0) {
9033 histBins.push_back(maxHistBins >> 1);
9034 maxHistBins >>= 1;
9035 }
9036 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9037 histBins.data(), histBins.size());
9038
Thierry Strudel3d639192016-09-09 11:52:26 -07009039 int32_t sharpness_map_size[] = {
9040 gCamCapability[cameraId]->sharpness_map_size.width,
9041 gCamCapability[cameraId]->sharpness_map_size.height};
9042
9043 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9044 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9045
9046 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9047 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9048
Emilian Peev0f3c3162017-03-15 12:57:46 +00009049 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9050 if (0 <= indexPD) {
9051 // Advertise PD stats data as part of the Depth capabilities
9052 int32_t depthWidth =
9053 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9054 int32_t depthHeight =
9055 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
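        // RAW16 PD data occupies depthWidth * depthHeight * 2 bytes; assuming the
        // standard 16-byte depth point-cloud sample (four floats), that yields the
        // sample count advertised below.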
9056 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9057 assert(0 < depthSamplesCount);
9058 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9059 &depthSamplesCount, 1);
9060
9061 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9062 depthHeight,
9063 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9064 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9065 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9066 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9067 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9068
9069 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9070 depthHeight, 33333333,
9071 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9072 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9073 depthMinDuration,
9074 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9075
9076 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9077 depthHeight, 0,
9078 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9079 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9080 depthStallDuration,
9081 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9082
9083 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9084 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9085 }
9086
Thierry Strudel3d639192016-09-09 11:52:26 -07009087 int32_t scalar_formats[] = {
9088 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9089 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9090 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9091 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9092 HAL_PIXEL_FORMAT_RAW10,
9093 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009094 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9095 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9096 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009097
9098 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9099 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9100 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9101 count, MAX_SIZES_CNT, available_processed_sizes);
9102 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9103 available_processed_sizes, count * 2);
9104
9105 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9106 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9107 makeTable(gCamCapability[cameraId]->raw_dim,
9108 count, MAX_SIZES_CNT, available_raw_sizes);
9109 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9110 available_raw_sizes, count * 2);
9111
9112 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9113 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9114 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9115 count, MAX_SIZES_CNT, available_fps_ranges);
9116 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9117 available_fps_ranges, count * 2);
9118
9119 camera_metadata_rational exposureCompensationStep = {
9120 gCamCapability[cameraId]->exp_compensation_step.numerator,
9121 gCamCapability[cameraId]->exp_compensation_step.denominator};
9122 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9123 &exposureCompensationStep, 1);
9124
9125 Vector<uint8_t> availableVstabModes;
9126 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9127 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009128 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009129 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009130 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009131 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009132 count = IS_TYPE_MAX;
9133 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9134 for (size_t i = 0; i < count; i++) {
9135 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9136 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9137 eisSupported = true;
9138 break;
9139 }
9140 }
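    // Advertise VIDEO_STABILIZATION_MODE_ON only for the back camera when the EIS
    // property is enabled and the sensor reports EIS 2.0 or 3.0 support.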
9141 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009142 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9143 }
9144 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9145 availableVstabModes.array(), availableVstabModes.size());
9146
9147 /*HAL 1 and HAL 3 common*/
9148 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9149 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9150 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009151 // Cap the max zoom to the max preferred value
9152 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009153 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9154 &maxZoom, 1);
9155
9156 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9157 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9158
9159 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9160 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9161 max3aRegions[2] = 0; /* AF not supported */
9162 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9163 max3aRegions, 3);
9164
9165 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9166 memset(prop, 0, sizeof(prop));
9167 property_get("persist.camera.facedetect", prop, "1");
9168 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9169 LOGD("Support face detection mode: %d",
9170 supportedFaceDetectMode);
9171
9172 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009173    /* support mode should be OFF if the max number of faces is 0 */
9174 if (maxFaces <= 0) {
9175 supportedFaceDetectMode = 0;
9176 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009177 Vector<uint8_t> availableFaceDetectModes;
9178 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9179 if (supportedFaceDetectMode == 1) {
9180 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9181 } else if (supportedFaceDetectMode == 2) {
9182 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9183 } else if (supportedFaceDetectMode == 3) {
9184 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9185 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9186 } else {
9187 maxFaces = 0;
9188 }
9189 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9190 availableFaceDetectModes.array(),
9191 availableFaceDetectModes.size());
9192 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9193 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009194 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9195 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9196 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009197
9198 int32_t exposureCompensationRange[] = {
9199 gCamCapability[cameraId]->exposure_compensation_min,
9200 gCamCapability[cameraId]->exposure_compensation_max};
9201 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9202 exposureCompensationRange,
9203 sizeof(exposureCompensationRange)/sizeof(int32_t));
9204
9205 uint8_t lensFacing = (facingBack) ?
9206 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9207 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9208
9209 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9210 available_thumbnail_sizes,
9211 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9212
9213 /*all sizes will be clubbed into this tag*/
9214 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9215 /*android.scaler.availableStreamConfigurations*/
9216 Vector<int32_t> available_stream_configs;
9217 cam_dimension_t active_array_dim;
9218 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9219 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009220
9221 /*advertise list of input dimensions supported based on below property.
9222      By default all sizes up to 5MP will be advertised.
9223 Note that the setprop resolution format should be WxH.
9224 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9225 To list all supported sizes, setprop needs to be set with "0x0" */
9226 cam_dimension_t minInputSize = {2592,1944}; //5MP
9227 memset(prop, 0, sizeof(prop));
9228 property_get("persist.camera.input.minsize", prop, "2592x1944");
9229 if (strlen(prop) > 0) {
9230 char *saveptr = NULL;
9231 char *token = strtok_r(prop, "x", &saveptr);
9232 if (token != NULL) {
9233 minInputSize.width = atoi(token);
9234 }
9235 token = strtok_r(NULL, "x", &saveptr);
9236 if (token != NULL) {
9237 minInputSize.height = atoi(token);
9238 }
9239 }
9240
Thierry Strudel3d639192016-09-09 11:52:26 -07009241 /* Add input/output stream configurations for each scalar formats*/
9242 for (size_t j = 0; j < scalar_formats_count; j++) {
9243 switch (scalar_formats[j]) {
9244 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9245 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9246 case HAL_PIXEL_FORMAT_RAW10:
9247 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9248 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9249 addStreamConfig(available_stream_configs, scalar_formats[j],
9250 gCamCapability[cameraId]->raw_dim[i],
9251 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9252 }
9253 break;
9254 case HAL_PIXEL_FORMAT_BLOB:
9255 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9256 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9257 addStreamConfig(available_stream_configs, scalar_formats[j],
9258 gCamCapability[cameraId]->picture_sizes_tbl[i],
9259 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9260 }
9261 break;
9262 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9263 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9264 default:
9265 cam_dimension_t largest_picture_size;
9266 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9267 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9268 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9269 addStreamConfig(available_stream_configs, scalar_formats[j],
9270 gCamCapability[cameraId]->picture_sizes_tbl[i],
9271 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009272                /* For the two formats checked below we also support input streams for reprocessing; advertise those */
9273 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9274 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9275 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9276 >= minInputSize.width) || (gCamCapability[cameraId]->
9277 picture_sizes_tbl[i].height >= minInputSize.height)) {
9278 addStreamConfig(available_stream_configs, scalar_formats[j],
9279 gCamCapability[cameraId]->picture_sizes_tbl[i],
9280 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9281 }
9282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009283 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009284
Thierry Strudel3d639192016-09-09 11:52:26 -07009285 break;
9286 }
9287 }
9288
9289 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9290 available_stream_configs.array(), available_stream_configs.size());
9291 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9292 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9293
9294 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9295 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9296
9297 /* android.scaler.availableMinFrameDurations */
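/* Each entry is a (format, width, height, min_frame_duration_ns) quadruple,
   flattened into this int64 vector. */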
9298 Vector<int64_t> available_min_durations;
9299 for (size_t j = 0; j < scalar_formats_count; j++) {
9300 switch (scalar_formats[j]) {
9301 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9302 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9303 case HAL_PIXEL_FORMAT_RAW10:
9304 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9305 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9306 available_min_durations.add(scalar_formats[j]);
9307 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9308 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9309 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9310 }
9311 break;
9312 default:
9313 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9314 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9315 available_min_durations.add(scalar_formats[j]);
9316 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9317 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9318 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9319 }
9320 break;
9321 }
9322 }
9323 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9324 available_min_durations.array(), available_min_durations.size());
9325
9326 Vector<int32_t> available_hfr_configs;
9327 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9328 int32_t fps = 0;
9329 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9330 case CAM_HFR_MODE_60FPS:
9331 fps = 60;
9332 break;
9333 case CAM_HFR_MODE_90FPS:
9334 fps = 90;
9335 break;
9336 case CAM_HFR_MODE_120FPS:
9337 fps = 120;
9338 break;
9339 case CAM_HFR_MODE_150FPS:
9340 fps = 150;
9341 break;
9342 case CAM_HFR_MODE_180FPS:
9343 fps = 180;
9344 break;
9345 case CAM_HFR_MODE_210FPS:
9346 fps = 210;
9347 break;
9348 case CAM_HFR_MODE_240FPS:
9349 fps = 240;
9350 break;
9351 case CAM_HFR_MODE_480FPS:
9352 fps = 480;
9353 break;
9354 case CAM_HFR_MODE_OFF:
9355 case CAM_HFR_MODE_MAX:
9356 default:
9357 break;
9358 }
9359
9360 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9361 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9362 /* For each HFR frame rate, need to advertise one variable fps range
9363 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9364 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9365 * set by the app. When video recording is started, [120, 120] is
9366 * set. This way sensor configuration does not change when recording
9367 * is started */
9368
9369 /* (width, height, fps_min, fps_max, batch_size_max) */
9370 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9371 j < MAX_SIZES_CNT; j++) {
9372 available_hfr_configs.add(
9373 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9374 available_hfr_configs.add(
9375 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9376 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9377 available_hfr_configs.add(fps);
9378 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9379
9380 /* (width, height, fps_min, fps_max, batch_size_max) */
9381 available_hfr_configs.add(
9382 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9383 available_hfr_configs.add(
9384 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9385 available_hfr_configs.add(fps);
9386 available_hfr_configs.add(fps);
9387 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9388 }
9389 }
9390 }
9391 //Advertise HFR capability only if the property is set
9392 memset(prop, 0, sizeof(prop));
9393 property_get("persist.camera.hal3hfr.enable", prop, "1");
9394 uint8_t hfrEnable = (uint8_t)atoi(prop);
9395
9396 if (hfrEnable && available_hfr_configs.array()) {
9397 staticInfo.update(
9398 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9399 available_hfr_configs.array(), available_hfr_configs.size());
9400 }
9401
9402 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9403 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9404 &max_jpeg_size, 1);
9405
9406 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9407 size_t size = 0;
9408 count = CAM_EFFECT_MODE_MAX;
9409 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9410 for (size_t i = 0; i < count; i++) {
9411 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9412 gCamCapability[cameraId]->supported_effects[i]);
9413 if (NAME_NOT_FOUND != val) {
9414 avail_effects[size] = (uint8_t)val;
9415 size++;
9416 }
9417 }
9418 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9419 avail_effects,
9420 size);
9421
9422 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9423 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9424 size_t supported_scene_modes_cnt = 0;
9425 count = CAM_SCENE_MODE_MAX;
9426 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9427 for (size_t i = 0; i < count; i++) {
9428 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9429 CAM_SCENE_MODE_OFF) {
9430 int val = lookupFwkName(SCENE_MODES_MAP,
9431 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9432 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009433
Thierry Strudel3d639192016-09-09 11:52:26 -07009434 if (NAME_NOT_FOUND != val) {
9435 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9436 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9437 supported_scene_modes_cnt++;
9438 }
9439 }
9440 }
9441 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9442 avail_scene_modes,
9443 supported_scene_modes_cnt);
9444
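/* ANDROID_CONTROL_SCENE_MODE_OVERRIDES holds one (AE, AWB, AF) mode triple per
   advertised scene mode; makeOverridesList() below maps the HAL overrides to
   framework enum values. */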
9445 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9446 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9447 supported_scene_modes_cnt,
9448 CAM_SCENE_MODE_MAX,
9449 scene_mode_overrides,
9450 supported_indexes,
9451 cameraId);
9452
9453 if (supported_scene_modes_cnt == 0) {
9454 supported_scene_modes_cnt = 1;
9455 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9456 }
9457
9458 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9459 scene_mode_overrides, supported_scene_modes_cnt * 3);
9460
9461 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9462 ANDROID_CONTROL_MODE_AUTO,
9463 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9464 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9465 available_control_modes,
9466 3);
9467
9468 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9469 size = 0;
9470 count = CAM_ANTIBANDING_MODE_MAX;
9471 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9472 for (size_t i = 0; i < count; i++) {
9473 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9474 gCamCapability[cameraId]->supported_antibandings[i]);
9475 if (NAME_NOT_FOUND != val) {
9476 avail_antibanding_modes[size] = (uint8_t)val;
9477 size++;
9478 }
9479
9480 }
9481 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9482 avail_antibanding_modes,
9483 size);
9484
9485 uint8_t avail_abberation_modes[] = {
9486 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9487 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9488 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9489 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9490 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9491 if (0 == count) {
9492 // If no aberration correction modes are available for a device, advertise only the OFF mode
9493 size = 1;
9494 } else {
9495 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported
9496 // So, advertise all 3 modes if at least one mode is supported, as per the
9497 // new M requirement
9498 size = 3;
9499 }
9500 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9501 avail_abberation_modes,
9502 size);
9503
9504 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9505 size = 0;
9506 count = CAM_FOCUS_MODE_MAX;
9507 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9508 for (size_t i = 0; i < count; i++) {
9509 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9510 gCamCapability[cameraId]->supported_focus_modes[i]);
9511 if (NAME_NOT_FOUND != val) {
9512 avail_af_modes[size] = (uint8_t)val;
9513 size++;
9514 }
9515 }
9516 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9517 avail_af_modes,
9518 size);
9519
9520 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9521 size = 0;
9522 count = CAM_WB_MODE_MAX;
9523 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9524 for (size_t i = 0; i < count; i++) {
9525 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9526 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9527 gCamCapability[cameraId]->supported_white_balances[i]);
9528 if (NAME_NOT_FOUND != val) {
9529 avail_awb_modes[size] = (uint8_t)val;
9530 size++;
9531 }
9532 }
9533 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9534 avail_awb_modes,
9535 size);
9536
9537 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9538 count = CAM_FLASH_FIRING_LEVEL_MAX;
9539 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9540 count);
9541 for (size_t i = 0; i < count; i++) {
9542 available_flash_levels[i] =
9543 gCamCapability[cameraId]->supported_firing_levels[i];
9544 }
9545 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9546 available_flash_levels, count);
9547
9548 uint8_t flashAvailable;
9549 if (gCamCapability[cameraId]->flash_available)
9550 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9551 else
9552 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9553 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9554 &flashAvailable, 1);
9555
9556 Vector<uint8_t> avail_ae_modes;
9557 count = CAM_AE_MODE_MAX;
9558 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9559 for (size_t i = 0; i < count; i++) {
9560 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
9561 }
9562 if (flashAvailable) {
9563 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9564 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009565 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009566 }
9567 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9568 avail_ae_modes.array(),
9569 avail_ae_modes.size());
9570
9571 int32_t sensitivity_range[2];
9572 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9573 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9574 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9575 sensitivity_range,
9576 sizeof(sensitivity_range) / sizeof(int32_t));
9577
9578 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9579 &gCamCapability[cameraId]->max_analog_sensitivity,
9580 1);
9581
9582 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9583 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9584 &sensor_orientation,
9585 1);
9586
9587 int32_t max_output_streams[] = {
9588 MAX_STALLING_STREAMS,
9589 MAX_PROCESSED_STREAMS,
9590 MAX_RAW_STREAMS};
9591 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9592 max_output_streams,
9593 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9594
9595 uint8_t avail_leds = 0;
9596 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9597 &avail_leds, 0);
9598
9599 uint8_t focus_dist_calibrated;
9600 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9601 gCamCapability[cameraId]->focus_dist_calibrated);
9602 if (NAME_NOT_FOUND != val) {
9603 focus_dist_calibrated = (uint8_t)val;
9604 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9605 &focus_dist_calibrated, 1);
9606 }
9607
9608 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9609 size = 0;
9610 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9611 MAX_TEST_PATTERN_CNT);
9612 for (size_t i = 0; i < count; i++) {
9613 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9614 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9615 if (NAME_NOT_FOUND != testpatternMode) {
9616 avail_testpattern_modes[size] = testpatternMode;
9617 size++;
9618 }
9619 }
9620 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9621 avail_testpattern_modes,
9622 size);
9623
9624 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9625 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9626 &max_pipeline_depth,
9627 1);
9628
9629 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9630 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9631 &partial_result_count,
9632 1);
9633
9634 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9635 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9636
9637 Vector<uint8_t> available_capabilities;
9638 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9639 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9640 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9641 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9642 if (supportBurst) {
9643 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9644 }
9645 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9646 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9647 if (hfrEnable && available_hfr_configs.array()) {
9648 available_capabilities.add(
9649 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9650 }
9651
9652 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9653 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9654 }
9655 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9656 available_capabilities.array(),
9657 available_capabilities.size());
9658
9659 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9660 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9661 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9662 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9663
9664 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9665 &aeLockAvailable, 1);
9666
9667 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9668 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9669 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9670 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9671
9672 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9673 &awbLockAvailable, 1);
9674
9675 int32_t max_input_streams = 1;
9676 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9677 &max_input_streams,
9678 1);
9679
9680 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9681 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9682 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9683 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9684 HAL_PIXEL_FORMAT_YCbCr_420_888};
9685 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9686 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9687
9688 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9689 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9690 &max_latency,
9691 1);
9692
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009693#ifndef USE_HAL_3_3
9694 int32_t isp_sensitivity_range[2];
9695 isp_sensitivity_range[0] =
9696 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9697 isp_sensitivity_range[1] =
9698 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9699 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9700 isp_sensitivity_range,
9701 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9702#endif
9703
Thierry Strudel3d639192016-09-09 11:52:26 -07009704 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9705 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9706 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9707 available_hot_pixel_modes,
9708 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9709
9710 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9711 ANDROID_SHADING_MODE_FAST,
9712 ANDROID_SHADING_MODE_HIGH_QUALITY};
9713 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9714 available_shading_modes,
9715 3);
9716
9717 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9718 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9719 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9720 available_lens_shading_map_modes,
9721 2);
9722
9723 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9724 ANDROID_EDGE_MODE_FAST,
9725 ANDROID_EDGE_MODE_HIGH_QUALITY,
9726 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9727 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9728 available_edge_modes,
9729 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9730
9731 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9732 ANDROID_NOISE_REDUCTION_MODE_FAST,
9733 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9734 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9735 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9736 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9737 available_noise_red_modes,
9738 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9739
9740 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9741 ANDROID_TONEMAP_MODE_FAST,
9742 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9743 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9744 available_tonemap_modes,
9745 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9746
9747 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9748 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9749 available_hot_pixel_map_modes,
9750 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9751
9752 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9753 gCamCapability[cameraId]->reference_illuminant1);
9754 if (NAME_NOT_FOUND != val) {
9755 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9756 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9757 }
9758
9759 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9760 gCamCapability[cameraId]->reference_illuminant2);
9761 if (NAME_NOT_FOUND != val) {
9762 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9763 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9764 }
9765
9766 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9767 (void *)gCamCapability[cameraId]->forward_matrix1,
9768 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9769
9770 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9771 (void *)gCamCapability[cameraId]->forward_matrix2,
9772 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9773
9774 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9775 (void *)gCamCapability[cameraId]->color_transform1,
9776 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9777
9778 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9779 (void *)gCamCapability[cameraId]->color_transform2,
9780 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9781
9782 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9783 (void *)gCamCapability[cameraId]->calibration_transform1,
9784 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9785
9786 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9787 (void *)gCamCapability[cameraId]->calibration_transform2,
9788 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9789
9790 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9791 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9792 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9793 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9794 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9795 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9796 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9797 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9798 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9799 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9800 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9801 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9802 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9803 ANDROID_JPEG_GPS_COORDINATES,
9804 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9805 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9806 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9807 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9808 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9809 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9810 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9811 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9812 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9813 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009814#ifndef USE_HAL_3_3
9815 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9816#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009817 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009818 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009819 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9820 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009821 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009822 /* DevCamDebug metadata request_keys_basic */
9823 DEVCAMDEBUG_META_ENABLE,
9824 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009825 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9826 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009827 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009828
9829 size_t request_keys_cnt =
9830 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9831 Vector<int32_t> available_request_keys;
9832 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9833 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9834 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9835 }
9836
9837 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9838 available_request_keys.array(), available_request_keys.size());
9839
9840 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9841 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9842 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9843 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9844 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9845 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9846 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9847 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9848 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9849 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9850 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9851 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9852 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9853 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9854 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9855 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9856 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009857 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009858 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9859 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9860 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009861 ANDROID_STATISTICS_FACE_SCORES,
9862#ifndef USE_HAL_3_3
9863 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9864#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009865 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009866 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009867 // DevCamDebug metadata result_keys_basic
9868 DEVCAMDEBUG_META_ENABLE,
9869 // DevCamDebug metadata result_keys AF
9870 DEVCAMDEBUG_AF_LENS_POSITION,
9871 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9872 DEVCAMDEBUG_AF_TOF_DISTANCE,
9873 DEVCAMDEBUG_AF_LUMA,
9874 DEVCAMDEBUG_AF_HAF_STATE,
9875 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9876 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9877 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9878 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9879 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9880 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9881 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9882 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9883 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9884 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9885 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9886 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9887 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9888 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9889 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9890 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9891 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9892 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9893 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9894 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9895 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9896 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9897 // DevCamDebug metadata result_keys AEC
9898 DEVCAMDEBUG_AEC_TARGET_LUMA,
9899 DEVCAMDEBUG_AEC_COMP_LUMA,
9900 DEVCAMDEBUG_AEC_AVG_LUMA,
9901 DEVCAMDEBUG_AEC_CUR_LUMA,
9902 DEVCAMDEBUG_AEC_LINECOUNT,
9903 DEVCAMDEBUG_AEC_REAL_GAIN,
9904 DEVCAMDEBUG_AEC_EXP_INDEX,
9905 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009906 // DevCamDebug metadata result_keys zzHDR
9907 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9908 DEVCAMDEBUG_AEC_L_LINECOUNT,
9909 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9910 DEVCAMDEBUG_AEC_S_LINECOUNT,
9911 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9912 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9913 // DevCamDebug metadata result_keys ADRC
9914 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9915 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9916 DEVCAMDEBUG_AEC_GTM_RATIO,
9917 DEVCAMDEBUG_AEC_LTM_RATIO,
9918 DEVCAMDEBUG_AEC_LA_RATIO,
9919 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009920 // DevCamDebug metadata result_keys AWB
9921 DEVCAMDEBUG_AWB_R_GAIN,
9922 DEVCAMDEBUG_AWB_G_GAIN,
9923 DEVCAMDEBUG_AWB_B_GAIN,
9924 DEVCAMDEBUG_AWB_CCT,
9925 DEVCAMDEBUG_AWB_DECISION,
9926 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009927 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9928 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9929 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009930 };
9931
Thierry Strudel3d639192016-09-09 11:52:26 -07009932 size_t result_keys_cnt =
9933 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9934
9935 Vector<int32_t> available_result_keys;
9936 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9937 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9938 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9939 }
9940 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9941 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9942 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9943 }
9944 if (supportedFaceDetectMode == 1) {
9945 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9946 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9947 } else if ((supportedFaceDetectMode == 2) ||
9948 (supportedFaceDetectMode == 3)) {
9949 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9950 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9951 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009952#ifndef USE_HAL_3_3
9953 if (hasBlackRegions) {
9954 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9955 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9956 }
9957#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009958 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9959 available_result_keys.array(), available_result_keys.size());
9960
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009961 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009962 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9963 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9964 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9965 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9966 ANDROID_SCALER_CROPPING_TYPE,
9967 ANDROID_SYNC_MAX_LATENCY,
9968 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9969 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9970 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9971 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9972 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9973 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9974 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9975 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9976 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9977 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9978 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9979 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9980 ANDROID_LENS_FACING,
9981 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9982 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9983 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9984 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9985 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9986 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9987 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9988 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9989 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9990 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9991 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9992 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9993 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9994 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9995 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9996 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9997 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9998 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9999 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10000 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010001 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010002 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10003 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10004 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10005 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10006 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10007 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10008 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10009 ANDROID_CONTROL_AVAILABLE_MODES,
10010 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10011 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10012 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10013 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010014 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10015#ifndef USE_HAL_3_3
10016 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10017 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10018#endif
10019 };
10020
10021 Vector<int32_t> available_characteristics_keys;
10022 available_characteristics_keys.appendArray(characteristics_keys_basic,
10023 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10024#ifndef USE_HAL_3_3
10025 if (hasBlackRegions) {
10026 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10027 }
10028#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010029
10030 if (0 <= indexPD) {
10031 int32_t depthKeys[] = {
10032 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10033 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10034 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10035 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10036 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10037 };
10038 available_characteristics_keys.appendArray(depthKeys,
10039 sizeof(depthKeys) / sizeof(depthKeys[0]));
10040 }
10041
Thierry Strudel3d639192016-09-09 11:52:26 -070010042 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010043 available_characteristics_keys.array(),
10044 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010045
10046 /*available stall durations depend on the hw + sw and will be different for different devices */
10047 /*have to add for raw after implementation*/
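/* Each entry is a (format, width, height, stall_duration_ns) quadruple. */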
10048 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10049 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10050
10051 Vector<int64_t> available_stall_durations;
10052 for (uint32_t j = 0; j < stall_formats_count; j++) {
10053 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10054 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10055 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10056 available_stall_durations.add(stall_formats[j]);
10057 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10058 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10059 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10060 }
10061 } else {
10062 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10063 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10064 available_stall_durations.add(stall_formats[j]);
10065 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10066 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10067 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10068 }
10069 }
10070 }
10071 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10072 available_stall_durations.array(),
10073 available_stall_durations.size());
10074
10075 //QCAMERA3_OPAQUE_RAW
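// Pick the vendor opaque RAW format (legacy vs MIPI packing) based on the
// backend's opaque raw format type and the sensor white level (8/10/12-bit).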
10076 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10077 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10078 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10079 case LEGACY_RAW:
10080 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10081 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10082 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10083 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10084 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10085 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10086 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10087 break;
10088 case MIPI_RAW:
10089 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10090 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10091 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10092 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10093 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10094 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10095 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10096 break;
10097 default:
10098 LOGE("unknown opaque_raw_format %d",
10099 gCamCapability[cameraId]->opaque_raw_fmt);
10100 break;
10101 }
10102 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10103
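/* QCAMERA3_OPAQUE_RAW_STRIDES entries are (width, height, stride) triples,
   one per supported raw dimension. */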
10104 Vector<int32_t> strides;
10105 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10106 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10107 cam_stream_buf_plane_info_t buf_planes;
10108 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10109 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10110 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10111 &gCamCapability[cameraId]->padding_info, &buf_planes);
10112 strides.add(buf_planes.plane_info.mp[0].stride);
10113 }
10114 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10115 strides.size());
10116
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010117 //TBD: remove the following line once backend advertises zzHDR in feature mask
10118 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010119 //Video HDR default
10120 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10121 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010122 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010123 int32_t vhdr_mode[] = {
10124 QCAMERA3_VIDEO_HDR_MODE_OFF,
10125 QCAMERA3_VIDEO_HDR_MODE_ON};
10126
10127 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10128 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10129 vhdr_mode, vhdr_mode_count);
10130 }
10131
Thierry Strudel3d639192016-09-09 11:52:26 -070010132 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10133 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10134 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10135
10136 uint8_t isMonoOnly =
10137 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10138 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10139 &isMonoOnly, 1);
10140
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010141#ifndef USE_HAL_3_3
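/* ANDROID_SENSOR_OPAQUE_RAW_SIZE entries are (width, height, frame length in
   bytes) triples, derived from the raw plane layout for each supported raw
   dimension. */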
10142 Vector<int32_t> opaque_size;
10143 for (size_t j = 0; j < scalar_formats_count; j++) {
10144 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10145 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10146 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10147 cam_stream_buf_plane_info_t buf_planes;
10148
10149 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10150 &gCamCapability[cameraId]->padding_info, &buf_planes);
10151
10152 if (rc == 0) {
10153 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10154 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10155 opaque_size.add(buf_planes.plane_info.frame_len);
10156 } else {
10157 LOGE("raw frame calculation failed!");
10158 }
10159 }
10160 }
10161 }
10162
10163 if ((opaque_size.size() > 0) &&
10164 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10165 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10166 else
10167 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10168#endif
10169
Thierry Strudel04e026f2016-10-10 11:27:36 -070010170 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10171 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10172 size = 0;
10173 count = CAM_IR_MODE_MAX;
10174 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10175 for (size_t i = 0; i < count; i++) {
10176 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10177 gCamCapability[cameraId]->supported_ir_modes[i]);
10178 if (NAME_NOT_FOUND != val) {
10179 avail_ir_modes[size] = (int32_t)val;
10180 size++;
10181 }
10182 }
10183 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10184 avail_ir_modes, size);
10185 }
10186
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010187 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10188 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10189 size = 0;
10190 count = CAM_AEC_CONVERGENCE_MAX;
10191 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10192 for (size_t i = 0; i < count; i++) {
10193 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10194 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10195 if (NAME_NOT_FOUND != val) {
10196 available_instant_aec_modes[size] = (int32_t)val;
10197 size++;
10198 }
10199 }
10200 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10201 available_instant_aec_modes, size);
10202 }
10203
Thierry Strudel54dc9782017-02-15 12:12:10 -080010204 int32_t sharpness_range[] = {
10205 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10206 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10207 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10208
10209 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10210 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10211 size = 0;
10212 count = CAM_BINNING_CORRECTION_MODE_MAX;
10213 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10214 for (size_t i = 0; i < count; i++) {
10215 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10216 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10217 gCamCapability[cameraId]->supported_binning_modes[i]);
10218 if (NAME_NOT_FOUND != val) {
10219 avail_binning_modes[size] = (int32_t)val;
10220 size++;
10221 }
10222 }
10223 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10224 avail_binning_modes, size);
10225 }
10226
10227 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10228 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10229 size = 0;
10230 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10231 for (size_t i = 0; i < count; i++) {
10232 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10233 gCamCapability[cameraId]->supported_aec_modes[i]);
10234 if (NAME_NOT_FOUND != val)
10235 available_aec_modes[size++] = val;
10236 }
10237 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10238 available_aec_modes, size);
10239 }
10240
10241 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10242 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10243 size = 0;
10244 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10245 for (size_t i = 0; i < count; i++) {
10246 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10247 gCamCapability[cameraId]->supported_iso_modes[i]);
10248 if (NAME_NOT_FOUND != val)
10249 available_iso_modes[size++] = val;
10250 }
10251 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10252 available_iso_modes, size);
10253 }
10254
10255 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10256 for (size_t i = 0; i < count; i++)
10257 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10258 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10259 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10260
10261 int32_t available_saturation_range[4];
10262 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10263 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10264 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10265 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10266 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10267 available_saturation_range, 4);
10268
10269 uint8_t is_hdr_values[2];
10270 is_hdr_values[0] = 0;
10271 is_hdr_values[1] = 1;
10272 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10273 is_hdr_values, 2);
10274
10275 float is_hdr_confidence_range[2];
10276 is_hdr_confidence_range[0] = 0.0;
10277 is_hdr_confidence_range[1] = 1.0;
10278 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10279 is_hdr_confidence_range, 2);
10280
Emilian Peev0a972ef2017-03-16 10:25:53 +000010281 size_t eepromLength = strnlen(
10282 reinterpret_cast<const char *>(
10283 gCamCapability[cameraId]->eeprom_version_info),
10284 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10285 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010286 char easelInfo[] = ",E:N";
10287 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10288 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10289 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010290 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10291 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010292 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010293 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10294 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10295 }
10296
Thierry Strudel3d639192016-09-09 11:52:26 -070010297 gStaticMetadata[cameraId] = staticInfo.release();
10298 return rc;
10299}
10300
10301/*===========================================================================
10302 * FUNCTION : makeTable
10303 *
10304 * DESCRIPTION: make a table of sizes
10305 *
10306 * PARAMETERS :
10307 *
10308 *
10309 *==========================================================================*/
10310void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10311 size_t max_size, int32_t *sizeTable)
10312{
10313 size_t j = 0;
10314 if (size > max_size) {
10315 size = max_size;
10316 }
10317 for (size_t i = 0; i < size; i++) {
10318 sizeTable[j] = dimTable[i].width;
10319 sizeTable[j+1] = dimTable[i].height;
10320 j+=2;
10321 }
10322}
10323
10324/*===========================================================================
10325 * FUNCTION : makeFPSTable
10326 *
10327 * DESCRIPTION: make a table of fps ranges
10328 *
10329 * PARAMETERS :
10330 *
10331 *==========================================================================*/
10332void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10333 size_t max_size, int32_t *fpsRangesTable)
10334{
10335 size_t j = 0;
10336 if (size > max_size) {
10337 size = max_size;
10338 }
10339 for (size_t i = 0; i < size; i++) {
10340 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10341 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10342 j+=2;
10343 }
10344}
10345
10346/*===========================================================================
10347 * FUNCTION : makeOverridesList
10348 *
10349 * DESCRIPTION: make a list of scene mode overrides
10350 *
10351 * PARAMETERS :
10352 *
10353 *
10354 *==========================================================================*/
10355void QCamera3HardwareInterface::makeOverridesList(
10356 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10357 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10358{
10359 /*daemon will give a list of overrides for all scene modes.
10360 However we should send the fwk only the overrides for the scene modes
10361 supported by the framework*/
10362 size_t j = 0;
10363 if (size > max_size) {
10364 size = max_size;
10365 }
10366 size_t focus_count = CAM_FOCUS_MODE_MAX;
10367 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10368 focus_count);
10369 for (size_t i = 0; i < size; i++) {
10370 bool supt = false;
10371 size_t index = supported_indexes[i];
10372 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10373 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10374 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10375 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10376 overridesTable[index].awb_mode);
10377 if (NAME_NOT_FOUND != val) {
10378 overridesList[j+1] = (uint8_t)val;
10379 }
10380 uint8_t focus_override = overridesTable[index].af_mode;
10381 for (size_t k = 0; k < focus_count; k++) {
10382 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10383 supt = true;
10384 break;
10385 }
10386 }
10387 if (supt) {
10388 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10389 focus_override);
10390 if (NAME_NOT_FOUND != val) {
10391 overridesList[j+2] = (uint8_t)val;
10392 }
10393 } else {
10394 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10395 }
10396 j+=3;
10397 }
10398}
10399
10400/*===========================================================================
10401 * FUNCTION : filterJpegSizes
10402 *
10403 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
10404 * could be downscaled to
10405 *
10406 * PARAMETERS :
10407 *
10408 * RETURN : length of jpegSizes array
10409 *==========================================================================*/
10410
10411size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10412 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10413 uint8_t downscale_factor)
10414{
10415 if (0 == downscale_factor) {
10416 downscale_factor = 1;
10417 }
10418
10419 int32_t min_width = active_array_size.width / downscale_factor;
10420 int32_t min_height = active_array_size.height / downscale_factor;
10421 size_t jpegSizesCnt = 0;
10422 if (processedSizesCnt > maxCount) {
10423 processedSizesCnt = maxCount;
10424 }
10425 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10426 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10427 jpegSizes[jpegSizesCnt] = processedSizes[i];
10428 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10429 jpegSizesCnt += 2;
10430 }
10431 }
10432 return jpegSizesCnt;
10433}
10434
10435/*===========================================================================
10436 * FUNCTION : computeNoiseModelEntryS
10437 *
10438 * DESCRIPTION: function to map a given sensitivity to the S noise
10439 * model parameters in the DNG noise model.
10440 *
10441 * PARAMETERS : sens : the sensor sensitivity
10442 *
10443 * RETURN : S (sensor amplification) noise
10444 *
10445 *==========================================================================*/
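// The DNG noise profile advertised via ANDROID_SENSOR_NOISE_PROFILE models the
// noise standard deviation of a normalized pixel value x as sqrt(S*x + O); this
// helper derives the sensitivity-dependent S term from the calibrated
// gradient/offset and clamps it to be non-negative.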
10446double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10447 double s = gCamCapability[mCameraId]->gradient_S * sens +
10448 gCamCapability[mCameraId]->offset_S;
10449 return ((s < 0.0) ? 0.0 : s);
10450}
10451
10452/*===========================================================================
10453 * FUNCTION : computeNoiseModelEntryO
10454 *
10455 * DESCRIPTION: function to map a given sensitivity to the O noise
10456 * model parameters in the DNG noise model.
10457 *
10458 * PARAMETERS : sens : the sensor sensitivity
10459 *
10460 * RETURN : O (sensor readout) noise
10461 *
10462 *==========================================================================*/
10463double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10464 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10465 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10466 1.0 : (1.0 * sens / max_analog_sens);
10467 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10468 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10469 return ((o < 0.0) ? 0.0 : o);
10470}
10471
10472/*===========================================================================
10473 * FUNCTION : getSensorSensitivity
10474 *
10475 * DESCRIPTION: convert iso_mode to an integer value
10476 *
10477 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10478 *
10479 * RETURN : sensitivity supported by sensor
10480 *
10481 *==========================================================================*/
10482int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10483{
10484 int32_t sensitivity;
10485
10486 switch (iso_mode) {
10487 case CAM_ISO_MODE_100:
10488 sensitivity = 100;
10489 break;
10490 case CAM_ISO_MODE_200:
10491 sensitivity = 200;
10492 break;
10493 case CAM_ISO_MODE_400:
10494 sensitivity = 400;
10495 break;
10496 case CAM_ISO_MODE_800:
10497 sensitivity = 800;
10498 break;
10499 case CAM_ISO_MODE_1600:
10500 sensitivity = 1600;
10501 break;
10502 default:
10503 sensitivity = -1;
10504 break;
10505 }
10506 return sensitivity;
10507}
10508
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010509int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010510 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010511 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10512 // to connect to Easel.
10513 bool doNotpowerOnEasel =
10514 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10515
10516 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010517 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10518 return OK;
10519 }
10520
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010521 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010522 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010523 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010524 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010525 return res;
10526 }
10527
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010528 EaselManagerClientOpened = true;
10529
10530 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010531 if (res != OK) {
10532 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10533 }
10534
10535 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010536 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010537 }
10538
10539 return OK;
10540}
10541
Thierry Strudel3d639192016-09-09 11:52:26 -070010542/*===========================================================================
10543 * FUNCTION : getCamInfo
10544 *
10545 * DESCRIPTION: query camera capabilities
10546 *
10547 * PARAMETERS :
10548 * @cameraId : camera Id
10549 * @info : camera info struct to be filled in with camera capabilities
10550 *
10551 * RETURN : int type of status
10552 * NO_ERROR -- success
10553 *              non-zero failure code
10554 *==========================================================================*/
10555int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10556 struct camera_info *info)
10557{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010558 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010559 int rc = 0;
10560
10561 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010562
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010563 {
10564 Mutex::Autolock l(gHdrPlusClientLock);
10565 rc = initHdrPlusClientLocked();
10566 if (rc != OK) {
10567 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10568 pthread_mutex_unlock(&gCamLock);
10569 return rc;
10570 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010571 }
10572
Thierry Strudel3d639192016-09-09 11:52:26 -070010573 if (NULL == gCamCapability[cameraId]) {
10574 rc = initCapabilities(cameraId);
10575 if (rc < 0) {
10576 pthread_mutex_unlock(&gCamLock);
10577 return rc;
10578 }
10579 }
10580
10581 if (NULL == gStaticMetadata[cameraId]) {
10582 rc = initStaticMetadata(cameraId);
10583 if (rc < 0) {
10584 pthread_mutex_unlock(&gCamLock);
10585 return rc;
10586 }
10587 }
10588
10589 switch(gCamCapability[cameraId]->position) {
10590 case CAM_POSITION_BACK:
10591 case CAM_POSITION_BACK_AUX:
10592 info->facing = CAMERA_FACING_BACK;
10593 break;
10594
10595 case CAM_POSITION_FRONT:
10596 case CAM_POSITION_FRONT_AUX:
10597 info->facing = CAMERA_FACING_FRONT;
10598 break;
10599
10600 default:
10601 LOGE("Unknown position type %d for camera id:%d",
10602 gCamCapability[cameraId]->position, cameraId);
10603 rc = -1;
10604 break;
10605 }
10606
10607
10608 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010609#ifndef USE_HAL_3_3
10610 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10611#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010612 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010613#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010614 info->static_camera_characteristics = gStaticMetadata[cameraId];
10615
10616 //For now assume both cameras can operate independently.
10617 info->conflicting_devices = NULL;
10618 info->conflicting_devices_length = 0;
10619
10620 //resource cost is 100 * MIN(1.0, m/M),
10621 //where m is throughput requirement with maximum stream configuration
10622 //and M is CPP maximum throughput.
10623 float max_fps = 0.0;
10624 for (uint32_t i = 0;
10625 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10626 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10627 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10628 }
10629 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10630 gCamCapability[cameraId]->active_array_size.width *
10631 gCamCapability[cameraId]->active_array_size.height * max_fps /
10632 gCamCapability[cameraId]->max_pixel_bandwidth;
10633 info->resource_cost = 100 * MIN(1.0, ratio);
10634 LOGI("camera %d resource cost is %d", cameraId,
10635 info->resource_cost);
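    // Worked example (hypothetical numbers, for illustration only): a 4000x3000 active
    // array with max_fps = 30 and MAX_PROCESSED_STREAMS = 3 needs
    // m = 3 * 4000 * 3000 * 30 = 1.08e9 pixels/s; with a CPP bandwidth M of 1.2e9,
    // ratio = 0.9 and the reported resource cost is 90. Ratios of 1.0 or more cap at 100.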
10636
10637 pthread_mutex_unlock(&gCamLock);
10638 return rc;
10639}
10640
10641/*===========================================================================
10642 * FUNCTION : translateCapabilityToMetadata
10643 *
10644 * DESCRIPTION: translate the capability into camera_metadata_t
10645 *
10646 * PARAMETERS : type of the request
10647 *
10648 *
10649 * RETURN : success: camera_metadata_t*
10650 * failure: NULL
10651 *
10652 *==========================================================================*/
10653camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10654{
10655 if (mDefaultMetadata[type] != NULL) {
10656 return mDefaultMetadata[type];
10657 }
10658 //first time we are handling this request
10659 //fill up the metadata structure using the wrapper class
10660 CameraMetadata settings;
10661 //translate from cam_capability_t to camera_metadata_tag_t
10662 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10663 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10664 int32_t defaultRequestID = 0;
10665 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10666
10667 /* OIS disable */
10668 char ois_prop[PROPERTY_VALUE_MAX];
10669 memset(ois_prop, 0, sizeof(ois_prop));
10670 property_get("persist.camera.ois.disable", ois_prop, "0");
10671 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10672
10673 /* Force video to use OIS */
10674 char videoOisProp[PROPERTY_VALUE_MAX];
10675 memset(videoOisProp, 0, sizeof(videoOisProp));
10676 property_get("persist.camera.ois.video", videoOisProp, "1");
10677 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010678
10679 // Hybrid AE enable/disable
10680 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10681 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10682 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10683 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10684
Thierry Strudel3d639192016-09-09 11:52:26 -070010685 uint8_t controlIntent = 0;
10686 uint8_t focusMode;
10687 uint8_t vsMode;
10688 uint8_t optStabMode;
10689 uint8_t cacMode;
10690 uint8_t edge_mode;
10691 uint8_t noise_red_mode;
10692 uint8_t tonemap_mode;
10693 bool highQualityModeEntryAvailable = FALSE;
10694 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010695 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010696 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10697 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010698 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010699 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010700
Thierry Strudel3d639192016-09-09 11:52:26 -070010701 switch (type) {
10702 case CAMERA3_TEMPLATE_PREVIEW:
10703 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10704 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10705 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10706 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10707 edge_mode = ANDROID_EDGE_MODE_FAST;
10708 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10709 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10710 break;
10711 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10712 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10713 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10714 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10715 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10716 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10717 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10718 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10719 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10720 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10721 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10722 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10723 highQualityModeEntryAvailable = TRUE;
10724 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10725 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10726 fastModeEntryAvailable = TRUE;
10727 }
10728 }
10729 if (highQualityModeEntryAvailable) {
10730 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10731 } else if (fastModeEntryAvailable) {
10732 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10733 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010734 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10735 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10736 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010737 break;
10738 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10739 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10740 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10741 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010742 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10743 edge_mode = ANDROID_EDGE_MODE_FAST;
10744 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10745 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10746 if (forceVideoOis)
10747 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10748 break;
10749 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10750 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10751 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10752 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010753 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10754 edge_mode = ANDROID_EDGE_MODE_FAST;
10755 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10756 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10757 if (forceVideoOis)
10758 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10759 break;
10760 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10761 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10762 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10763 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10764 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10765 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10766 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10767 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10768 break;
10769 case CAMERA3_TEMPLATE_MANUAL:
10770 edge_mode = ANDROID_EDGE_MODE_FAST;
10771 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10772 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10773 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10774 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10775 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10776 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10777 break;
10778 default:
10779 edge_mode = ANDROID_EDGE_MODE_FAST;
10780 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10781 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10782 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10783 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10784 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10785 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10786 break;
10787 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010788    // Set CAC to OFF if the underlying device doesn't support it
10789 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10790 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10791 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010792 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10793 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10794 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10795 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10796 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10797 }
10798 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010799 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010800 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010801
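    // Override the template default below: if the sensor advertises exactly one OIS
    // mode, force that mode; the persist.camera.ois.disable property likewise forces
    // OIS off regardless of the template.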
10802 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10803 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10804 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10805 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10806 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10807 || ois_disable)
10808 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10809 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010810 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010811
10812 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10813 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10814
10815 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10816 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10817
10818 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10819 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10820
10821 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10822 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10823
10824 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10825 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10826
10827 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10828 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10829
10830 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10831 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10832
10833 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10834 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10835
10836 /*flash*/
10837 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10838 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10839
10840 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10841 settings.update(ANDROID_FLASH_FIRING_POWER,
10842 &flashFiringLevel, 1);
10843
10844 /* lens */
10845 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10846 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10847
10848 if (gCamCapability[mCameraId]->filter_densities_count) {
10849 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10850 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10851 gCamCapability[mCameraId]->filter_densities_count);
10852 }
10853
10854 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10855 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10856
Thierry Strudel3d639192016-09-09 11:52:26 -070010857 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10858 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10859
10860 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10861 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10862
10863 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10864 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10865
10866 /* face detection (default to OFF) */
10867 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10868 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10869
Thierry Strudel54dc9782017-02-15 12:12:10 -080010870 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10871 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010872
10873 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10874 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10875
10876 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10877 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10878
Thierry Strudel3d639192016-09-09 11:52:26 -070010879
10880 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10881 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10882
10883 /* Exposure time(Update the Min Exposure Time)*/
10884 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10885 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10886
10887 /* frame duration */
10888 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10889 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10890
10891 /* sensitivity */
10892 static const int32_t default_sensitivity = 100;
10893 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010894#ifndef USE_HAL_3_3
10895 static const int32_t default_isp_sensitivity =
10896 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10897 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10898#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010899
10900 /*edge mode*/
10901 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10902
10903 /*noise reduction mode*/
10904 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10905
10906 /*color correction mode*/
10907 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10908 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10909
 10910    /* tonemap mode */
10911 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10912
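    // Default crop region is the full active pixel array, i.e. no digital zoom.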
10913 int32_t scaler_crop_region[4];
10914 scaler_crop_region[0] = 0;
10915 scaler_crop_region[1] = 0;
10916 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10917 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10918 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10919
10920 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10921 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10922
10923 /*focus distance*/
10924 float focus_distance = 0.0;
10925 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10926
10927 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010928 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010929 float max_range = 0.0;
10930 float max_fixed_fps = 0.0;
10931 int32_t fps_range[2] = {0, 0};
10932 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10933 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010934 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10935 TEMPLATE_MAX_PREVIEW_FPS) {
10936 continue;
10937 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010938 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10939 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10940 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10941 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10942 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10943 if (range > max_range) {
10944 fps_range[0] =
10945 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10946 fps_range[1] =
10947 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10948 max_range = range;
10949 }
10950 } else {
10951 if (range < 0.01 && max_fixed_fps <
10952 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10953 fps_range[0] =
10954 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10955 fps_range[1] =
10956 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10957 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10958 }
10959 }
10960 }
10961 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10962
10963 /*precapture trigger*/
10964 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10965 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10966
10967 /*af trigger*/
10968 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10969 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10970
10971 /* ae & af regions */
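    // Region layout is {left, top, right, bottom, weight}; the default spans the whole
    // active array with weight 0, which the metadata spec treats as "region not in use".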
10972 int32_t active_region[] = {
10973 gCamCapability[mCameraId]->active_array_size.left,
10974 gCamCapability[mCameraId]->active_array_size.top,
10975 gCamCapability[mCameraId]->active_array_size.left +
10976 gCamCapability[mCameraId]->active_array_size.width,
10977 gCamCapability[mCameraId]->active_array_size.top +
10978 gCamCapability[mCameraId]->active_array_size.height,
10979 0};
10980 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10981 sizeof(active_region) / sizeof(active_region[0]));
10982 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10983 sizeof(active_region) / sizeof(active_region[0]));
10984
10985 /* black level lock */
10986 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10987 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10988
Thierry Strudel3d639192016-09-09 11:52:26 -070010989 //special defaults for manual template
10990 if (type == CAMERA3_TEMPLATE_MANUAL) {
10991 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10992 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10993
10994 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10995 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10996
10997 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10998 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10999
11000 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11001 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11002
11003 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11004 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11005
11006 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11007 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11008 }
11009
11010
11011 /* TNR
 11012     * We'll use this location to determine the modes in which TNR will be enabled.
 11013     * TNR is enabled if either the preview or the video stream requires it.
 11014     * This is not to be confused with linking on a per-stream basis; that decision
 11015     * is still made per session and is handled as part of stream configuration.
11016 */
11017 uint8_t tnr_enable = 0;
11018
11019 if (m_bTnrPreview || m_bTnrVideo) {
11020
11021 switch (type) {
11022 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11023 tnr_enable = 1;
11024 break;
11025
11026 default:
11027 tnr_enable = 0;
11028 break;
11029 }
11030
11031 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11032 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11033 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11034
11035 LOGD("TNR:%d with process plate %d for template:%d",
11036 tnr_enable, tnr_process_type, type);
11037 }
11038
11039 //Update Link tags to default
11040 int32_t sync_type = CAM_TYPE_STANDALONE;
11041 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11042
11043 int32_t is_main = 0; //this doesn't matter as app should overwrite
11044 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11045
11046 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11047
11048 /* CDS default */
11049 char prop[PROPERTY_VALUE_MAX];
11050 memset(prop, 0, sizeof(prop));
11051 property_get("persist.camera.CDS", prop, "Auto");
11052 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11053 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11054 if (CAM_CDS_MODE_MAX == cds_mode) {
11055 cds_mode = CAM_CDS_MODE_AUTO;
11056 }
11057
 11058     /* Disable CDS in templates which have TNR enabled */
11059 if (tnr_enable)
11060 cds_mode = CAM_CDS_MODE_OFF;
11061
11062 int32_t mode = cds_mode;
11063 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011064
Thierry Strudel269c81a2016-10-12 12:13:59 -070011065 /* Manual Convergence AEC Speed is disabled by default*/
11066 float default_aec_speed = 0;
11067 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11068
11069 /* Manual Convergence AWB Speed is disabled by default*/
11070 float default_awb_speed = 0;
11071 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11072
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011073 // Set instant AEC to normal convergence by default
11074 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11075 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11076
Shuzhen Wang19463d72016-03-08 11:09:52 -080011077 /* hybrid ae */
11078 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11079
Thierry Strudel3d639192016-09-09 11:52:26 -070011080 mDefaultMetadata[type] = settings.release();
11081
11082 return mDefaultMetadata[type];
11083}
11084
11085/*===========================================================================
11086 * FUNCTION : setFrameParameters
11087 *
11088 * DESCRIPTION: set parameters per frame as requested in the metadata from
11089 * framework
11090 *
11091 * PARAMETERS :
11092 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011093 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011094 * @blob_request: Whether this request is a blob request or not
11095 *
11096 * RETURN : success: NO_ERROR
11097 * failure:
11098 *==========================================================================*/
11099int QCamera3HardwareInterface::setFrameParameters(
11100 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011101 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011102 int blob_request,
11103 uint32_t snapshotStreamId)
11104{
11105 /*translate from camera_metadata_t type to parm_type_t*/
11106 int rc = 0;
11107 int32_t hal_version = CAM_HAL_V3;
11108
11109 clear_metadata_buffer(mParameters);
11110 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11111 LOGE("Failed to set hal version in the parameters");
11112 return BAD_VALUE;
11113 }
11114
11115 /*we need to update the frame number in the parameters*/
11116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11117 request->frame_number)) {
11118 LOGE("Failed to set the frame number in the parameters");
11119 return BAD_VALUE;
11120 }
11121
11122 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011124 LOGE("Failed to set stream type mask in the parameters");
11125 return BAD_VALUE;
11126 }
11127
11128 if (mUpdateDebugLevel) {
11129 uint32_t dummyDebugLevel = 0;
 11130         /* The value of dummyDebugLevel is irrelevant. On
11131 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11133 dummyDebugLevel)) {
11134 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11135 return BAD_VALUE;
11136 }
11137 mUpdateDebugLevel = false;
11138 }
11139
11140 if(request->settings != NULL){
11141 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11142 if (blob_request)
11143 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11144 }
11145
11146 return rc;
11147}
11148
11149/*===========================================================================
11150 * FUNCTION : setReprocParameters
11151 *
11152 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11153 * return it.
11154 *
11155 * PARAMETERS :
11156 * @request : request that needs to be serviced
11157 *
11158 * RETURN : success: NO_ERROR
11159 * failure:
11160 *==========================================================================*/
11161int32_t QCamera3HardwareInterface::setReprocParameters(
11162 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11163 uint32_t snapshotStreamId)
11164{
11165 /*translate from camera_metadata_t type to parm_type_t*/
11166 int rc = 0;
11167
11168 if (NULL == request->settings){
11169 LOGE("Reprocess settings cannot be NULL");
11170 return BAD_VALUE;
11171 }
11172
11173 if (NULL == reprocParam) {
11174 LOGE("Invalid reprocessing metadata buffer");
11175 return BAD_VALUE;
11176 }
11177 clear_metadata_buffer(reprocParam);
11178
11179 /*we need to update the frame number in the parameters*/
11180 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11181 request->frame_number)) {
11182 LOGE("Failed to set the frame number in the parameters");
11183 return BAD_VALUE;
11184 }
11185
11186 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11187 if (rc < 0) {
11188 LOGE("Failed to translate reproc request");
11189 return rc;
11190 }
11191
11192 CameraMetadata frame_settings;
11193 frame_settings = request->settings;
11194 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11195 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11196 int32_t *crop_count =
11197 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11198 int32_t *crop_data =
11199 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11200 int32_t *roi_map =
11201 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11202 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11203 cam_crop_data_t crop_meta;
11204 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11205 crop_meta.num_of_streams = 1;
11206 crop_meta.crop_info[0].crop.left = crop_data[0];
11207 crop_meta.crop_info[0].crop.top = crop_data[1];
11208 crop_meta.crop_info[0].crop.width = crop_data[2];
11209 crop_meta.crop_info[0].crop.height = crop_data[3];
11210
11211 crop_meta.crop_info[0].roi_map.left =
11212 roi_map[0];
11213 crop_meta.crop_info[0].roi_map.top =
11214 roi_map[1];
11215 crop_meta.crop_info[0].roi_map.width =
11216 roi_map[2];
11217 crop_meta.crop_info[0].roi_map.height =
11218 roi_map[3];
11219
11220 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11221 rc = BAD_VALUE;
11222 }
11223 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11224 request->input_buffer->stream,
11225 crop_meta.crop_info[0].crop.left,
11226 crop_meta.crop_info[0].crop.top,
11227 crop_meta.crop_info[0].crop.width,
11228 crop_meta.crop_info[0].crop.height);
11229 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11230 request->input_buffer->stream,
11231 crop_meta.crop_info[0].roi_map.left,
11232 crop_meta.crop_info[0].roi_map.top,
11233 crop_meta.crop_info[0].roi_map.width,
11234 crop_meta.crop_info[0].roi_map.height);
11235 } else {
11236 LOGE("Invalid reprocess crop count %d!", *crop_count);
11237 }
11238 } else {
11239 LOGE("No crop data from matching output stream");
11240 }
11241
11242 /* These settings are not needed for regular requests so handle them specially for
11243 reprocess requests; information needed for EXIF tags */
11244 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11245 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11246 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11247 if (NAME_NOT_FOUND != val) {
11248 uint32_t flashMode = (uint32_t)val;
11249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11250 rc = BAD_VALUE;
11251 }
11252 } else {
11253 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11254 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11255 }
11256 } else {
11257 LOGH("No flash mode in reprocess settings");
11258 }
11259
11260 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11261 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11263 rc = BAD_VALUE;
11264 }
11265 } else {
11266 LOGH("No flash state in reprocess settings");
11267 }
11268
11269 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11270 uint8_t *reprocessFlags =
11271 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11273 *reprocessFlags)) {
11274 rc = BAD_VALUE;
11275 }
11276 }
11277
Thierry Strudel54dc9782017-02-15 12:12:10 -080011278 // Add exif debug data to internal metadata
11279 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11280 mm_jpeg_debug_exif_params_t *debug_params =
11281 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11282 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11283 // AE
11284 if (debug_params->ae_debug_params_valid == TRUE) {
11285 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11286 debug_params->ae_debug_params);
11287 }
11288 // AWB
11289 if (debug_params->awb_debug_params_valid == TRUE) {
11290 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11291 debug_params->awb_debug_params);
11292 }
11293 // AF
11294 if (debug_params->af_debug_params_valid == TRUE) {
11295 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11296 debug_params->af_debug_params);
11297 }
11298 // ASD
11299 if (debug_params->asd_debug_params_valid == TRUE) {
11300 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11301 debug_params->asd_debug_params);
11302 }
11303 // Stats
11304 if (debug_params->stats_debug_params_valid == TRUE) {
11305 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11306 debug_params->stats_debug_params);
11307 }
11308 // BE Stats
11309 if (debug_params->bestats_debug_params_valid == TRUE) {
11310 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11311 debug_params->bestats_debug_params);
11312 }
11313 // BHIST
11314 if (debug_params->bhist_debug_params_valid == TRUE) {
11315 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11316 debug_params->bhist_debug_params);
11317 }
11318 // 3A Tuning
11319 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11320 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11321 debug_params->q3a_tuning_debug_params);
11322 }
11323 }
11324
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011325 // Add metadata which reprocess needs
11326 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11327 cam_reprocess_info_t *repro_info =
11328 (cam_reprocess_info_t *)frame_settings.find
11329 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011330 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011331 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011332 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011333 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011334 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011335 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011336 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011337 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011338 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011339 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011340 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011341 repro_info->pipeline_flip);
11342 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11343 repro_info->af_roi);
11344 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11345 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011346        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
 11347            CAM_INTF_PARM_ROTATION metadata has already been added in
 11348            translateToHalMetadata. HAL needs to keep this new rotation
 11349            metadata. Otherwise, the old rotation info saved in the vendor tag
 11350            would be used */
11351 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11352 CAM_INTF_PARM_ROTATION, reprocParam) {
11353 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11354 } else {
11355 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011356 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011357 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011358 }
11359
 11360     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
 11361        to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
 11362        roi.width and roi.height would be the final JPEG size.
 11363        For now, HAL only checks this for reprocess requests */
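    // Illustrative example (hypothetical values, not taken from the code): with
    // QCAMERA3_JPEG_ENCODE_CROP_RECT = {200, 150, 2000, 1500} and
    // QCAMERA3_JPEG_ENCODE_CROP_ROI = {0, 0, 1920, 1080}, the reprocess frame is
    // cropped to the 2000x1500 rectangle at (200, 150) and then scaled during HW
    // JPEG encoding so that the final JPEG is 1920x1080.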
11364 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11365 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11366 uint8_t *enable =
11367 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11368 if (*enable == TRUE) {
11369 int32_t *crop_data =
11370 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11371 cam_stream_crop_info_t crop_meta;
11372 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11373 crop_meta.stream_id = 0;
11374 crop_meta.crop.left = crop_data[0];
11375 crop_meta.crop.top = crop_data[1];
11376 crop_meta.crop.width = crop_data[2];
11377 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011378 // The JPEG crop roi should match cpp output size
11379 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11380 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11381 crop_meta.roi_map.left = 0;
11382 crop_meta.roi_map.top = 0;
11383 crop_meta.roi_map.width = cpp_crop->crop.width;
11384 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011385 }
11386 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11387 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011388 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011389 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011390 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11391 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011392 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011393 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11394
11395 // Add JPEG scale information
11396 cam_dimension_t scale_dim;
11397 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11398 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11399 int32_t *roi =
11400 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11401 scale_dim.width = roi[2];
11402 scale_dim.height = roi[3];
11403 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11404 scale_dim);
11405 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11406 scale_dim.width, scale_dim.height, mCameraId);
11407 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011408 }
11409 }
11410
11411 return rc;
11412}
11413
11414/*===========================================================================
11415 * FUNCTION : saveRequestSettings
11416 *
11417 * DESCRIPTION: Add any settings that might have changed to the request settings
11418 * and save the settings to be applied on the frame
11419 *
11420 * PARAMETERS :
11421 * @jpegMetadata : the extracted and/or modified jpeg metadata
11422 * @request : request with initial settings
11423 *
11424 * RETURN :
11425 * camera_metadata_t* : pointer to the saved request settings
11426 *==========================================================================*/
11427camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11428 const CameraMetadata &jpegMetadata,
11429 camera3_capture_request_t *request)
11430{
11431 camera_metadata_t *resultMetadata;
11432 CameraMetadata camMetadata;
11433 camMetadata = request->settings;
11434
11435 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11436 int32_t thumbnail_size[2];
11437 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11438 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11439 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11440 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11441 }
11442
11443 if (request->input_buffer != NULL) {
11444 uint8_t reprocessFlags = 1;
11445 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11446 (uint8_t*)&reprocessFlags,
11447 sizeof(reprocessFlags));
11448 }
11449
11450 resultMetadata = camMetadata.release();
11451 return resultMetadata;
11452}
11453
11454/*===========================================================================
11455 * FUNCTION : setHalFpsRange
11456 *
11457 * DESCRIPTION: set FPS range parameter
11458 *
11459 *
11460 * PARAMETERS :
11461 * @settings : Metadata from framework
11462 * @hal_metadata: Metadata buffer
11463 *
11464 *
11465 * RETURN : success: NO_ERROR
11466 * failure:
11467 *==========================================================================*/
11468int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11469 metadata_buffer_t *hal_metadata)
11470{
11471 int32_t rc = NO_ERROR;
11472 cam_fps_range_t fps_range;
11473 fps_range.min_fps = (float)
11474 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11475 fps_range.max_fps = (float)
11476 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11477 fps_range.video_min_fps = fps_range.min_fps;
11478 fps_range.video_max_fps = fps_range.max_fps;
11479
11480 LOGD("aeTargetFpsRange fps: [%f %f]",
11481 fps_range.min_fps, fps_range.max_fps);
11482 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11483 * follows:
11484 * ---------------------------------------------------------------|
11485 * Video stream is absent in configure_streams |
 11486     * (Camcorder preview before the first video record)               |
11487 * ---------------------------------------------------------------|
11488 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11489 * | | | vid_min/max_fps|
11490 * ---------------------------------------------------------------|
11491 * NO | [ 30, 240] | 240 | [240, 240] |
11492 * |-------------|-------------|----------------|
11493 * | [240, 240] | 240 | [240, 240] |
11494 * ---------------------------------------------------------------|
11495 * Video stream is present in configure_streams |
11496 * ---------------------------------------------------------------|
11497 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11498 * | | | vid_min/max_fps|
11499 * ---------------------------------------------------------------|
11500 * NO | [ 30, 240] | 240 | [240, 240] |
11501 * (camcorder prev |-------------|-------------|----------------|
11502 * after video rec | [240, 240] | 240 | [240, 240] |
11503 * is stopped) | | | |
11504 * ---------------------------------------------------------------|
11505 * YES | [ 30, 240] | 240 | [240, 240] |
11506 * |-------------|-------------|----------------|
11507 * | [240, 240] | 240 | [240, 240] |
11508 * ---------------------------------------------------------------|
11509 * When Video stream is absent in configure_streams,
11510 * preview fps = sensor_fps / batchsize
11511 * Eg: for 240fps at batchSize 4, preview = 60fps
11512 * for 120fps at batchSize 4, preview = 30fps
11513 *
11514 * When video stream is present in configure_streams, preview fps is as per
11515 * the ratio of preview buffers to video buffers requested in process
11516 * capture request
11517 */
11518 mBatchSize = 0;
11519 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11520 fps_range.min_fps = fps_range.video_max_fps;
11521 fps_range.video_min_fps = fps_range.video_max_fps;
11522 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11523 fps_range.max_fps);
11524 if (NAME_NOT_FOUND != val) {
11525 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11527 return BAD_VALUE;
11528 }
11529
11530 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11531 /* If batchmode is currently in progress and the fps changes,
11532 * set the flag to restart the sensor */
11533 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11534 (mHFRVideoFps != fps_range.max_fps)) {
11535 mNeedSensorRestart = true;
11536 }
11537 mHFRVideoFps = fps_range.max_fps;
11538 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11539 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11540 mBatchSize = MAX_HFR_BATCH_SIZE;
11541 }
11542 }
11543 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
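            // Example (actual values depend on PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE):
            // with a 30 fps preview rate, a 240 fps HFR request gives a batch size of
            // 240 / 30 = 8, subject to the MAX_HFR_BATCH_SIZE cap above.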
11544
11545 }
11546 } else {
11547 /* HFR mode is session param in backend/ISP. This should be reset when
11548 * in non-HFR mode */
11549 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11550 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11551 return BAD_VALUE;
11552 }
11553 }
11554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11555 return BAD_VALUE;
11556 }
11557 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11558 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11559 return rc;
11560}
11561
11562/*===========================================================================
11563 * FUNCTION : translateToHalMetadata
11564 *
11565 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11566 *
11567 *
11568 * PARAMETERS :
11569 * @request : request sent from framework
11570 *
11571 *
11572 * RETURN : success: NO_ERROR
11573 * failure:
11574 *==========================================================================*/
11575int QCamera3HardwareInterface::translateToHalMetadata
11576 (const camera3_capture_request_t *request,
11577 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011578 uint32_t snapshotStreamId) {
11579 if (request == nullptr || hal_metadata == nullptr) {
11580 return BAD_VALUE;
11581 }
11582
11583 int64_t minFrameDuration = getMinFrameDuration(request);
11584
11585 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11586 minFrameDuration);
11587}
11588
11589int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11590 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11591 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11592
Thierry Strudel3d639192016-09-09 11:52:26 -070011593 int rc = 0;
11594 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011595 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011596
11597 /* Do not change the order of the following list unless you know what you are
11598 * doing.
11599 * The order is laid out in such a way that parameters in the front of the table
11600 * may be used to override the parameters later in the table. Examples are:
11601 * 1. META_MODE should precede AEC/AWB/AF MODE
 11602     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
 11603     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
 11604     * 4. Any mode should precede its corresponding settings
11605 */
11606 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11607 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11609 rc = BAD_VALUE;
11610 }
11611 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11612 if (rc != NO_ERROR) {
11613 LOGE("extractSceneMode failed");
11614 }
11615 }
11616
11617 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11618 uint8_t fwk_aeMode =
11619 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11620 uint8_t aeMode;
11621 int32_t redeye;
11622
11623 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11624 aeMode = CAM_AE_MODE_OFF;
11625 } else {
11626 aeMode = CAM_AE_MODE_ON;
11627 }
11628 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11629 redeye = 1;
11630 } else {
11631 redeye = 0;
11632 }
11633
11634 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11635 fwk_aeMode);
11636 if (NAME_NOT_FOUND != val) {
11637 int32_t flashMode = (int32_t)val;
11638 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11639 }
11640
11641 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11643 rc = BAD_VALUE;
11644 }
11645 }
11646
11647 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11648 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11649 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11650 fwk_whiteLevel);
11651 if (NAME_NOT_FOUND != val) {
11652 uint8_t whiteLevel = (uint8_t)val;
11653 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11654 rc = BAD_VALUE;
11655 }
11656 }
11657 }
11658
11659 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11660 uint8_t fwk_cacMode =
11661 frame_settings.find(
11662 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11663 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11664 fwk_cacMode);
11665 if (NAME_NOT_FOUND != val) {
11666 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11667 bool entryAvailable = FALSE;
11668 // Check whether Frameworks set CAC mode is supported in device or not
11669 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11670 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11671 entryAvailable = TRUE;
11672 break;
11673 }
11674 }
11675 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11676 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11677 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11678 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11679 if (entryAvailable == FALSE) {
11680 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11681 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11682 } else {
11683 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
 11684                     // High is not supported, so set FAST since the spec says the underlying
 11685                     // device implementation can be the same for both modes.
11686 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11687 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
 11688                     // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
 11689                     // in order to avoid the fps drop due to high quality processing
11690 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11691 } else {
11692 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11693 }
11694 }
11695 }
11696 LOGD("Final cacMode is %d", cacMode);
11697 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11698 rc = BAD_VALUE;
11699 }
11700 } else {
11701 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11702 }
11703 }
11704
Thierry Strudel2896d122017-02-23 19:18:03 -080011705 char af_value[PROPERTY_VALUE_MAX];
11706 property_get("persist.camera.af.infinity", af_value, "0");
11707
Jason Lee84ae9972017-02-24 13:24:24 -080011708 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011709 if (atoi(af_value) == 0) {
11710 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011711 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011712 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11713 fwk_focusMode);
11714 if (NAME_NOT_FOUND != val) {
11715 uint8_t focusMode = (uint8_t)val;
11716 LOGD("set focus mode %d", focusMode);
11717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11718 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11719 rc = BAD_VALUE;
11720 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011721 }
11722 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011723 } else {
11724 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11725 LOGE("Focus forced to infinity %d", focusMode);
11726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11727 rc = BAD_VALUE;
11728 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011729 }
11730
Jason Lee84ae9972017-02-24 13:24:24 -080011731 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11732 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011733 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11735 focalDistance)) {
11736 rc = BAD_VALUE;
11737 }
11738 }
11739
11740 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11741 uint8_t fwk_antibandingMode =
11742 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11743 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11744 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11745 if (NAME_NOT_FOUND != val) {
11746 uint32_t hal_antibandingMode = (uint32_t)val;
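            // AUTO antibanding is specialized by region: m60HzZone (maintained elsewhere
            // in the HAL) selects the 60 Hz auto variant, otherwise the 50 Hz auto
            // variant is used.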
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011747 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11748 if (m60HzZone) {
11749 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11750 } else {
11751 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11752 }
11753 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11755 hal_antibandingMode)) {
11756 rc = BAD_VALUE;
11757 }
11758 }
11759 }
11760
11761 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11762 int32_t expCompensation = frame_settings.find(
11763 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11764 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11765 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11766 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11767 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011768 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11770 expCompensation)) {
11771 rc = BAD_VALUE;
11772 }
11773 }
11774
11775 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11776 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11778 rc = BAD_VALUE;
11779 }
11780 }
11781 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11782 rc = setHalFpsRange(frame_settings, hal_metadata);
11783 if (rc != NO_ERROR) {
11784 LOGE("setHalFpsRange failed");
11785 }
11786 }
11787
11788 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11789 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11791 rc = BAD_VALUE;
11792 }
11793 }
11794
11795 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11796 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11797 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11798 fwk_effectMode);
11799 if (NAME_NOT_FOUND != val) {
11800 uint8_t effectMode = (uint8_t)val;
11801 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11802 rc = BAD_VALUE;
11803 }
11804 }
11805 }
11806
11807 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11808 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11809 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11810 colorCorrectMode)) {
11811 rc = BAD_VALUE;
11812 }
11813 }
11814
11815 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11816 cam_color_correct_gains_t colorCorrectGains;
11817 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11818 colorCorrectGains.gains[i] =
11819 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11820 }
11821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11822 colorCorrectGains)) {
11823 rc = BAD_VALUE;
11824 }
11825 }
11826
11827 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11828 cam_color_correct_matrix_t colorCorrectTransform;
11829 cam_rational_type_t transform_elem;
11830 size_t num = 0;
11831 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11832 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11833 transform_elem.numerator =
11834 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11835 transform_elem.denominator =
11836 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11837 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11838 num++;
11839 }
11840 }
11841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11842 colorCorrectTransform)) {
11843 rc = BAD_VALUE;
11844 }
11845 }
11846
11847 cam_trigger_t aecTrigger;
11848 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11849 aecTrigger.trigger_id = -1;
11850 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11851 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11852 aecTrigger.trigger =
11853 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11854 aecTrigger.trigger_id =
11855 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11857 aecTrigger)) {
11858 rc = BAD_VALUE;
11859 }
11860 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11861 aecTrigger.trigger, aecTrigger.trigger_id);
11862 }
11863
11864 /*af_trigger must come with a trigger id*/
11865 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11866 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11867 cam_trigger_t af_trigger;
11868 af_trigger.trigger =
11869 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11870 af_trigger.trigger_id =
11871 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11872 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11873 rc = BAD_VALUE;
11874 }
11875 LOGD("AfTrigger: %d AfTriggerID: %d",
11876 af_trigger.trigger, af_trigger.trigger_id);
11877 }
11878
11879 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11880 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11881 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11882 rc = BAD_VALUE;
11883 }
11884 }
11885 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11886 cam_edge_application_t edge_application;
11887 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011888
Thierry Strudel3d639192016-09-09 11:52:26 -070011889 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11890 edge_application.sharpness = 0;
11891 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011892 edge_application.sharpness =
11893 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11894 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11895 int32_t sharpness =
11896 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11897 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11898 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11899 LOGD("Setting edge mode sharpness %d", sharpness);
11900 edge_application.sharpness = sharpness;
11901 }
11902 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011903 }
11904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11905 rc = BAD_VALUE;
11906 }
11907 }
11908
11909 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11910 int32_t respectFlashMode = 1;
11911 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11912 uint8_t fwk_aeMode =
11913 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11914 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
11915 respectFlashMode = 0;
11916 LOGH("AE Mode controls flash, ignore android.flash.mode");
11917 }
11918 }
11919 if (respectFlashMode) {
11920 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11921 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11922 LOGH("flash mode after mapping %d", val);
11923 // To check: CAM_INTF_META_FLASH_MODE usage
11924 if (NAME_NOT_FOUND != val) {
11925 uint8_t flashMode = (uint8_t)val;
11926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11927 rc = BAD_VALUE;
11928 }
11929 }
11930 }
11931 }
11932
11933 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11934 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11936 rc = BAD_VALUE;
11937 }
11938 }
11939
11940 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11941 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11943 flashFiringTime)) {
11944 rc = BAD_VALUE;
11945 }
11946 }
11947
11948 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
11949 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
11950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
11951 hotPixelMode)) {
11952 rc = BAD_VALUE;
11953 }
11954 }
11955
11956 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
11957 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
11958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
11959 lensAperture)) {
11960 rc = BAD_VALUE;
11961 }
11962 }
11963
11964 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
11965 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
11966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
11967 filterDensity)) {
11968 rc = BAD_VALUE;
11969 }
11970 }
11971
11972 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
11973 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
11974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
11975 focalLength)) {
11976 rc = BAD_VALUE;
11977 }
11978 }
11979
11980 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
11981 uint8_t optStabMode =
11982 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
11983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
11984 optStabMode)) {
11985 rc = BAD_VALUE;
11986 }
11987 }
11988
11989 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11990 uint8_t videoStabMode =
11991 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11992 LOGD("videoStabMode from APP = %d", videoStabMode);
11993 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11994 videoStabMode)) {
11995 rc = BAD_VALUE;
11996 }
11997 }
11998
11999
12000 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12001 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12003 noiseRedMode)) {
12004 rc = BAD_VALUE;
12005 }
12006 }
12007
12008 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12009 float reprocessEffectiveExposureFactor =
12010 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12012 reprocessEffectiveExposureFactor)) {
12013 rc = BAD_VALUE;
12014 }
12015 }
12016
12017 cam_crop_region_t scalerCropRegion;
12018 bool scalerCropSet = false;
12019 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12020 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12021 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12022 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12023 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12024
12025 // Map coordinate system from active array to sensor output.
12026 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12027 scalerCropRegion.width, scalerCropRegion.height);
12028
12029 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12030 scalerCropRegion)) {
12031 rc = BAD_VALUE;
12032 }
12033 scalerCropSet = true;
12034 }
12035
12036 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12037 int64_t sensorExpTime =
12038 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12039 LOGD("setting sensorExpTime %lld", sensorExpTime);
12040 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12041 sensorExpTime)) {
12042 rc = BAD_VALUE;
12043 }
12044 }
12045
12046 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12047 int64_t sensorFrameDuration =
12048 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012049 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12050 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12051 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12052 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12053 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12054 sensorFrameDuration)) {
12055 rc = BAD_VALUE;
12056 }
12057 }
12058
12059 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12060 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12061 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12062 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12063 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12064 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12065 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12067 sensorSensitivity)) {
12068 rc = BAD_VALUE;
12069 }
12070 }
12071
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012072#ifndef USE_HAL_3_3
12073 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12074 int32_t ispSensitivity =
12075 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12076 if (ispSensitivity <
12077 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12078 ispSensitivity =
12079 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12080 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12081 }
12082 if (ispSensitivity >
12083 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12084 ispSensitivity =
12085 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12086 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12087 }
12088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12089 ispSensitivity)) {
12090 rc = BAD_VALUE;
12091 }
12092 }
12093#endif
12094
Thierry Strudel3d639192016-09-09 11:52:26 -070012095 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12096 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12098 rc = BAD_VALUE;
12099 }
12100 }
12101
12102 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12103 uint8_t fwk_facedetectMode =
12104 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12105
12106 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12107 fwk_facedetectMode);
12108
12109 if (NAME_NOT_FOUND != val) {
12110 uint8_t facedetectMode = (uint8_t)val;
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12112 facedetectMode)) {
12113 rc = BAD_VALUE;
12114 }
12115 }
12116 }
12117
Thierry Strudel54dc9782017-02-15 12:12:10 -080012118 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012119 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012120 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12122 histogramMode)) {
12123 rc = BAD_VALUE;
12124 }
12125 }
12126
12127 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12128 uint8_t sharpnessMapMode =
12129 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12131 sharpnessMapMode)) {
12132 rc = BAD_VALUE;
12133 }
12134 }
12135
12136 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12137 uint8_t tonemapMode =
12138 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12139 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12140 rc = BAD_VALUE;
12141 }
12142 }
12143 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12144 /*All tonemap channels will have the same number of points*/
12145 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12146 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12147 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12148 cam_rgb_tonemap_curves tonemapCurves;
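        // Each tonemap curve is a flat array of interleaved (Pin, Pout) control points,
        // so the point count is half the metadata entry count; all three channels
        // are expected to carry the same number of points.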
12149 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12150 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12151 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12152 tonemapCurves.tonemap_points_cnt,
12153 CAM_MAX_TONEMAP_CURVE_SIZE);
12154 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12155 }
12156
12157 /* ch0 = G*/
12158 size_t point = 0;
12159 cam_tonemap_curve_t tonemapCurveGreen;
12160 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12161 for (size_t j = 0; j < 2; j++) {
12162 tonemapCurveGreen.tonemap_points[i][j] =
12163 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12164 point++;
12165 }
12166 }
12167 tonemapCurves.curves[0] = tonemapCurveGreen;
12168
12169 /* ch 1 = B */
12170 point = 0;
12171 cam_tonemap_curve_t tonemapCurveBlue;
12172 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12173 for (size_t j = 0; j < 2; j++) {
12174 tonemapCurveBlue.tonemap_points[i][j] =
12175 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12176 point++;
12177 }
12178 }
12179 tonemapCurves.curves[1] = tonemapCurveBlue;
12180
12181 /* ch 2 = R */
12182 point = 0;
12183 cam_tonemap_curve_t tonemapCurveRed;
12184 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12185 for (size_t j = 0; j < 2; j++) {
12186 tonemapCurveRed.tonemap_points[i][j] =
12187 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12188 point++;
12189 }
12190 }
12191 tonemapCurves.curves[2] = tonemapCurveRed;
12192
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12194 tonemapCurves)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12200 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12202 captureIntent)) {
12203 rc = BAD_VALUE;
12204 }
12205 }
12206
12207 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12208 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12210 blackLevelLock)) {
12211 rc = BAD_VALUE;
12212 }
12213 }
12214
12215 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12216 uint8_t lensShadingMapMode =
12217 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12219 lensShadingMapMode)) {
12220 rc = BAD_VALUE;
12221 }
12222 }
12223
12224 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12225 cam_area_t roi;
12226 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012227 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012228
12229 // Map coordinate system from active array to sensor output.
12230 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12231 roi.rect.height);
12232
12233 if (scalerCropSet) {
12234 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12235 }
12236 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12237 rc = BAD_VALUE;
12238 }
12239 }
12240
12241 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12242 cam_area_t roi;
12243 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012244 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012245
12246 // Map coordinate system from active array to sensor output.
12247 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12248 roi.rect.height);
12249
12250 if (scalerCropSet) {
12251 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12252 }
12253 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12254 rc = BAD_VALUE;
12255 }
12256 }
12257
12258 // CDS for non-HFR non-video mode
12259 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12260 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12261 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12262 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12263 LOGE("Invalid CDS mode %d!", *fwk_cds);
12264 } else {
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12266 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270 }
12271
Thierry Strudel04e026f2016-10-10 11:27:36 -070012272 // Video HDR
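    // Resolve the effective video HDR mode: start from the per-request vendor tag,
    // but force it on when video HDR was enabled at stream configuration time
    // (m_bVideoHdrEnabled).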
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012273 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012274 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012275 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12276 }
12277 if (m_bVideoHdrEnabled)
12278 vhdr = CAM_VIDEO_HDR_MODE_ON;
12279
Thierry Strudel54dc9782017-02-15 12:12:10 -080012280 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12281
12282 if(vhdr != curr_hdr_state)
12283 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12284
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012285 rc = setVideoHdrMode(mParameters, vhdr);
12286 if (rc != NO_ERROR) {
12287         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012288 }
12289
12290 //IR
12291 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12292 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12293 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012294 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12295 uint8_t isIRon = 0;
12296
12296         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012298 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12299 LOGE("Invalid IR mode %d!", fwk_ir);
12300 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012301 if(isIRon != curr_ir_state )
12302 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12303
Thierry Strudel04e026f2016-10-10 11:27:36 -070012304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12305 CAM_INTF_META_IR_MODE, fwk_ir)) {
12306 rc = BAD_VALUE;
12307 }
12308 }
12309 }
12310
Thierry Strudel54dc9782017-02-15 12:12:10 -080012311 //Binning Correction Mode
12312 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12313 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12314 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12315 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12316 || (0 > fwk_binning_correction)) {
12317 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12318 } else {
12319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12320 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324 }
12325
Thierry Strudel269c81a2016-10-12 12:13:59 -070012326 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12327 float aec_speed;
12328 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12329 LOGD("AEC Speed :%f", aec_speed);
12330 if ( aec_speed < 0 ) {
12331             LOGE("Invalid AEC convergence speed %f!", aec_speed);
12332 } else {
12333 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12334 aec_speed)) {
12335 rc = BAD_VALUE;
12336 }
12337 }
12338 }
12339
12340 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12341 float awb_speed;
12342 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12343 LOGD("AWB Speed :%f", awb_speed);
12344 if ( awb_speed < 0 ) {
12345             LOGE("Invalid AWB convergence speed %f!", awb_speed);
12346 } else {
12347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12348 awb_speed)) {
12349 rc = BAD_VALUE;
12350 }
12351 }
12352 }
12353
Thierry Strudel3d639192016-09-09 11:52:26 -070012354 // TNR
12355 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12356 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12357 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012358 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012359 cam_denoise_param_t tnr;
12360 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12361 tnr.process_plates =
12362 (cam_denoise_process_type_t)frame_settings.find(
12363 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12364 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012365
12366 if(b_TnrRequested != curr_tnr_state)
12367 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12368
Thierry Strudel3d639192016-09-09 11:52:26 -070012369 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12370 rc = BAD_VALUE;
12371 }
12372 }
12373
Thierry Strudel54dc9782017-02-15 12:12:10 -080012374 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012375 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012376 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12378 *exposure_metering_mode)) {
12379 rc = BAD_VALUE;
12380 }
12381 }
12382
Thierry Strudel3d639192016-09-09 11:52:26 -070012383 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12384 int32_t fwk_testPatternMode =
12385 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12386 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12387 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12388
12389 if (NAME_NOT_FOUND != testPatternMode) {
12390 cam_test_pattern_data_t testPatternData;
12391 memset(&testPatternData, 0, sizeof(testPatternData));
12392 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12393 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12394 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12395 int32_t *fwk_testPatternData =
12396 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12397 testPatternData.r = fwk_testPatternData[0];
12398 testPatternData.b = fwk_testPatternData[3];
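                // The two green samples map to Gr/Gb differently depending on the
                // sensor's Bayer color filter arrangement.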
12399 switch (gCamCapability[mCameraId]->color_arrangement) {
12400 case CAM_FILTER_ARRANGEMENT_RGGB:
12401 case CAM_FILTER_ARRANGEMENT_GRBG:
12402 testPatternData.gr = fwk_testPatternData[1];
12403 testPatternData.gb = fwk_testPatternData[2];
12404 break;
12405 case CAM_FILTER_ARRANGEMENT_GBRG:
12406 case CAM_FILTER_ARRANGEMENT_BGGR:
12407 testPatternData.gr = fwk_testPatternData[2];
12408 testPatternData.gb = fwk_testPatternData[1];
12409 break;
12410 default:
12411 LOGE("color arrangement %d is not supported",
12412 gCamCapability[mCameraId]->color_arrangement);
12413 break;
12414 }
12415 }
12416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12417 testPatternData)) {
12418 rc = BAD_VALUE;
12419 }
12420 } else {
12421 LOGE("Invalid framework sensor test pattern mode %d",
12422 fwk_testPatternMode);
12423 }
12424 }
12425
12426 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12427 size_t count = 0;
12428 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12429 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12430 gps_coords.data.d, gps_coords.count, count);
12431 if (gps_coords.count != count) {
12432 rc = BAD_VALUE;
12433 }
12434 }
12435
12436 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12437 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12438 size_t count = 0;
12439 const char *gps_methods_src = (const char *)
12440 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12441 memset(gps_methods, '\0', sizeof(gps_methods));
12442 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12443 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12444 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12445 if (GPS_PROCESSING_METHOD_SIZE != count) {
12446 rc = BAD_VALUE;
12447 }
12448 }
12449
12450 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12451 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12453 gps_timestamp)) {
12454 rc = BAD_VALUE;
12455 }
12456 }
12457
12458 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12459 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12460 cam_rotation_info_t rotation_info;
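        // Translate the JPEG orientation in degrees into the HAL rotation enum;
        // the rotation is applied on the snapshot stream identified by snapshotStreamId.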
12461 if (orientation == 0) {
12462 rotation_info.rotation = ROTATE_0;
12463 } else if (orientation == 90) {
12464 rotation_info.rotation = ROTATE_90;
12465 } else if (orientation == 180) {
12466 rotation_info.rotation = ROTATE_180;
12467 } else if (orientation == 270) {
12468 rotation_info.rotation = ROTATE_270;
12469 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012470 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012471 rotation_info.streamId = snapshotStreamId;
12472 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12479 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12480 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12481 rc = BAD_VALUE;
12482 }
12483 }
12484
12485 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12486 uint32_t thumb_quality = (uint32_t)
12487 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12488 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12489 thumb_quality)) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
12494 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12495 cam_dimension_t dim;
12496 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12497 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12499 rc = BAD_VALUE;
12500 }
12501 }
12502
12503 // Internal metadata
12504 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12505 size_t count = 0;
12506 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12507 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12508 privatedata.data.i32, privatedata.count, count);
12509 if (privatedata.count != count) {
12510 rc = BAD_VALUE;
12511 }
12512 }
12513
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012514 // ISO/Exposure Priority
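    // When the app selects ISO or exposure-time priority, forward the manual value
    // and enable ZSL; otherwise ZSL is explicitly disabled.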
12515 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12516 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12517 cam_priority_mode_t mode =
12518 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12519 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12520 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12521 use_iso_exp_pty.previewOnly = FALSE;
12522 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12523 use_iso_exp_pty.value = *ptr;
12524
12525 if(CAM_ISO_PRIORITY == mode) {
12526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12527 use_iso_exp_pty)) {
12528 rc = BAD_VALUE;
12529 }
12530 }
12531 else {
12532 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12533 use_iso_exp_pty)) {
12534 rc = BAD_VALUE;
12535 }
12536 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012537
12538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12539 rc = BAD_VALUE;
12540 }
12541 }
12542 } else {
12543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12544 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012545 }
12546 }
12547
12548 // Saturation
12549 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12550 int32_t* use_saturation =
12551 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
Thierry Strudel3d639192016-09-09 11:52:26 -070012557 // EV step
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12559 gCamCapability[mCameraId]->exp_compensation_step)) {
12560 rc = BAD_VALUE;
12561 }
12562
12563 // CDS info
12564 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12565 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12566 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12567
12568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12569 CAM_INTF_META_CDS_DATA, *cdsData)) {
12570 rc = BAD_VALUE;
12571 }
12572 }
12573
Shuzhen Wang19463d72016-03-08 11:09:52 -080012574 // Hybrid AE
12575 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12576 uint8_t *hybrid_ae = (uint8_t *)
12577 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12578
12579 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12580 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12581 rc = BAD_VALUE;
12582 }
12583 }
12584
Shuzhen Wang14415f52016-11-16 18:26:18 -080012585 // Histogram
12586 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12587 uint8_t histogramMode =
12588 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12590 histogramMode)) {
12591 rc = BAD_VALUE;
12592 }
12593 }
12594
12595 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12596 int32_t histogramBins =
12597 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12598 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12599 histogramBins)) {
12600 rc = BAD_VALUE;
12601 }
12602 }
12603
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012604 // Tracking AF
12605 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12606 uint8_t trackingAfTrigger =
12607 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12609 trackingAfTrigger)) {
12610 rc = BAD_VALUE;
12611 }
12612 }
12613
Thierry Strudel3d639192016-09-09 11:52:26 -070012614 return rc;
12615}
12616
12617/*===========================================================================
12618 * FUNCTION : captureResultCb
12619 *
12620 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12621 *
12622 * PARAMETERS :
12623 * @frame : frame information from mm-camera-interface
12624 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12625 * @userdata: userdata
12626 *
12627 * RETURN : NONE
12628 *==========================================================================*/
12629void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12630 camera3_stream_buffer_t *buffer,
12631 uint32_t frame_number, bool isInputBuffer, void *userdata)
12632{
12633 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12634 if (hw == NULL) {
12635 LOGE("Invalid hw %p", hw);
12636 return;
12637 }
12638
12639 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12640 return;
12641}
12642
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012643/*===========================================================================
12644 * FUNCTION : setBufferErrorStatus
12645 *
12646 * DESCRIPTION: Callback handler for channels to report any buffer errors
12647 *
12648 * PARAMETERS :
12649 * @ch : Channel on which buffer error is reported from
12650 * @frame_number : frame number on which buffer error is reported on
12651 * @buffer_status : buffer error status
12652 * @userdata: userdata
12653 *
12654 * RETURN : NONE
12655 *==========================================================================*/
12656void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12657 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12658{
12659 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12660 if (hw == NULL) {
12661 LOGE("Invalid hw %p", hw);
12662 return;
12663 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012664
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012665 hw->setBufferErrorStatus(ch, frame_number, err);
12666 return;
12667}
12668
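/*===========================================================================
 * FUNCTION   : setBufferErrorStatus
 *
 * DESCRIPTION: Mark the pending buffers that belong to the given channel and
 *              frame number with CAMERA3_BUFFER_STATUS_ERROR
 *
 * PARAMETERS :
 *   @ch : Channel on which buffer error is reported from
 *   @frameNumber : frame number on which buffer error is reported on
 *   @err : buffer error status
 *
 * RETURN     : NONE
 *==========================================================================*/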
12669void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12670 uint32_t frameNumber, camera3_buffer_status_t err)
12671{
12672 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12673 pthread_mutex_lock(&mMutex);
12674
12675 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12676 if (req.frame_number != frameNumber)
12677 continue;
12678 for (auto& k : req.mPendingBufferList) {
12679 if(k.stream->priv == ch) {
12680 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12681 }
12682 }
12683 }
12684
12685 pthread_mutex_unlock(&mMutex);
12686 return;
12687}
Thierry Strudel3d639192016-09-09 11:52:26 -070012688/*===========================================================================
12689 * FUNCTION : initialize
12690 *
12691 * DESCRIPTION: Pass framework callback pointers to HAL
12692 *
12693 * PARAMETERS :
12694 *
12695 *
12696 * RETURN : Success : 0
12697 * Failure: -ENODEV
12698 *==========================================================================*/
12699
12700int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12701 const camera3_callback_ops_t *callback_ops)
12702{
12703 LOGD("E");
12704 QCamera3HardwareInterface *hw =
12705 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12706 if (!hw) {
12707 LOGE("NULL camera device");
12708 return -ENODEV;
12709 }
12710
12711 int rc = hw->initialize(callback_ops);
12712 LOGD("X");
12713 return rc;
12714}
12715
12716/*===========================================================================
12717 * FUNCTION : configure_streams
12718 *
12719 * DESCRIPTION: Configure the set of output streams requested by the framework
12720 *
12721 * PARAMETERS :
12722 *
12723 *
12724 * RETURN : Success: 0
12725 * Failure: -EINVAL (if stream configuration is invalid)
12726 * -ENODEV (fatal error)
12727 *==========================================================================*/
12728
12729int QCamera3HardwareInterface::configure_streams(
12730 const struct camera3_device *device,
12731 camera3_stream_configuration_t *stream_list)
12732{
12733 LOGD("E");
12734 QCamera3HardwareInterface *hw =
12735 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12736 if (!hw) {
12737 LOGE("NULL camera device");
12738 return -ENODEV;
12739 }
12740 int rc = hw->configureStreams(stream_list);
12741 LOGD("X");
12742 return rc;
12743}
12744
12745/*===========================================================================
12746 * FUNCTION : construct_default_request_settings
12747 *
12748 * DESCRIPTION: Configure a settings buffer to meet the required use case
12749 *
12750 * PARAMETERS :
12751 *
12752 *
12753 * RETURN : Success: Return valid metadata
12754 * Failure: Return NULL
12755 *==========================================================================*/
12756const camera_metadata_t* QCamera3HardwareInterface::
12757 construct_default_request_settings(const struct camera3_device *device,
12758 int type)
12759{
12760
12761 LOGD("E");
12762 camera_metadata_t* fwk_metadata = NULL;
12763 QCamera3HardwareInterface *hw =
12764 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12765 if (!hw) {
12766 LOGE("NULL camera device");
12767 return NULL;
12768 }
12769
12770 fwk_metadata = hw->translateCapabilityToMetadata(type);
12771
12772 LOGD("X");
12773 return fwk_metadata;
12774}
12775
12776/*===========================================================================
12777 * FUNCTION : process_capture_request
12778 *
12779 * DESCRIPTION: Entry point for the framework to submit a capture request
12780 *
12781 * PARAMETERS :
12782 *
12783 *
12784 * RETURN :
12785 *==========================================================================*/
12786int QCamera3HardwareInterface::process_capture_request(
12787 const struct camera3_device *device,
12788 camera3_capture_request_t *request)
12789{
12790 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012791 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012792 QCamera3HardwareInterface *hw =
12793 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12794 if (!hw) {
12795 LOGE("NULL camera device");
12796 return -EINVAL;
12797 }
12798
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012799 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012800 LOGD("X");
12801 return rc;
12802}
12803
12804/*===========================================================================
12805 * FUNCTION : dump
12806 *
12807 * DESCRIPTION: Dump camera HAL state to the given file descriptor
12808 *
12809 * PARAMETERS :
12810 *
12811 *
12812 * RETURN :
12813 *==========================================================================*/
12814
12815void QCamera3HardwareInterface::dump(
12816 const struct camera3_device *device, int fd)
12817{
12818 /* Log level property is read when "adb shell dumpsys media.camera" is
12819 called so that the log level can be controlled without restarting
12820 the media server */
12821 getLogLevel();
12822
12823 LOGD("E");
12824 QCamera3HardwareInterface *hw =
12825 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12826 if (!hw) {
12827 LOGE("NULL camera device");
12828 return;
12829 }
12830
12831 hw->dump(fd);
12832 LOGD("X");
12833 return;
12834}
12835
12836/*===========================================================================
12837 * FUNCTION : flush
12838 *
12839 * DESCRIPTION: Flush all in-flight requests and return pending buffers to the framework
12840 *
12841 * PARAMETERS :
12842 *
12843 *
12844 * RETURN :
12845 *==========================================================================*/
12846
12847int QCamera3HardwareInterface::flush(
12848 const struct camera3_device *device)
12849{
12850 int rc;
12851 LOGD("E");
12852 QCamera3HardwareInterface *hw =
12853 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12854 if (!hw) {
12855 LOGE("NULL camera device");
12856 return -EINVAL;
12857 }
12858
12859 pthread_mutex_lock(&hw->mMutex);
12860 // Validate current state
12861 switch (hw->mState) {
12862 case STARTED:
12863 /* valid state */
12864 break;
12865
12866 case ERROR:
12867 pthread_mutex_unlock(&hw->mMutex);
12868 hw->handleCameraDeviceError();
12869 return -ENODEV;
12870
12871 default:
12872 LOGI("Flush returned during state %d", hw->mState);
12873 pthread_mutex_unlock(&hw->mMutex);
12874 return 0;
12875 }
12876 pthread_mutex_unlock(&hw->mMutex);
12877
12878 rc = hw->flush(true /* restart channels */ );
12879 LOGD("X");
12880 return rc;
12881}
12882
12883/*===========================================================================
12884 * FUNCTION : close_camera_device
12885 *
12886 * DESCRIPTION: Close the camera device and free the HAL instance
12887 *
12888 * PARAMETERS :
12889 *
12890 *
12891 * RETURN :
12892 *==========================================================================*/
12893int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12894{
12895 int ret = NO_ERROR;
12896 QCamera3HardwareInterface *hw =
12897 reinterpret_cast<QCamera3HardwareInterface *>(
12898 reinterpret_cast<camera3_device_t *>(device)->priv);
12899 if (!hw) {
12900 LOGE("NULL camera device");
12901 return BAD_VALUE;
12902 }
12903
12904 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12905 delete hw;
12906 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012907 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012908 return ret;
12909}
12910
12911/*===========================================================================
12912 * FUNCTION : getWaveletDenoiseProcessPlate
12913 *
12914 * DESCRIPTION: query wavelet denoise process plate
12915 *
12916 * PARAMETERS : None
12917 *
12918 * RETURN     : WNR process plate value
12919 *==========================================================================*/
12920cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12921{
12922 char prop[PROPERTY_VALUE_MAX];
12923 memset(prop, 0, sizeof(prop));
12924 property_get("persist.denoise.process.plates", prop, "0");
12925 int processPlate = atoi(prop);
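    // Map the persist property to a denoise plate: 0 = YCbCr plane, 1 = CbCr only,
    // 2 = streamlined YCbCr, 3 = streamlined CbCr; anything else falls back to
    // streamlined YCbCr.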
12926 switch(processPlate) {
12927 case 0:
12928 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12929 case 1:
12930 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12931 case 2:
12932 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12933 case 3:
12934 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12935 default:
12936 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12937 }
12938}
12939
12940
12941/*===========================================================================
12942 * FUNCTION : getTemporalDenoiseProcessPlate
12943 *
12944 * DESCRIPTION: query temporal denoise process plate
12945 *
12946 * PARAMETERS : None
12947 *
12948 * RETURN     : TNR process plate value
12949 *==========================================================================*/
12950cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
12951{
12952 char prop[PROPERTY_VALUE_MAX];
12953 memset(prop, 0, sizeof(prop));
12954 property_get("persist.tnr.process.plates", prop, "0");
12955 int processPlate = atoi(prop);
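    // Same mapping as the wavelet denoise plate: 0 = YCbCr plane, 1 = CbCr only,
    // 2 = streamlined YCbCr, 3 = streamlined CbCr; anything else falls back to
    // streamlined YCbCr.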
12956 switch(processPlate) {
12957 case 0:
12958 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12959 case 1:
12960 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12961 case 2:
12962 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12963 case 3:
12964 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12965 default:
12966 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12967 }
12968}
12969
12970
12971/*===========================================================================
12972 * FUNCTION : extractSceneMode
12973 *
12974 * DESCRIPTION: Extract scene mode from frameworks set metadata
12975 *
12976 * PARAMETERS :
12977 * @frame_settings: CameraMetadata reference
12978 *   @metaMode: ANDROID_CONTROL_MODE
12979 * @hal_metadata: hal metadata structure
12980 *
12981 * RETURN     : int32_t type of status
12982 *==========================================================================*/
12983int32_t QCamera3HardwareInterface::extractSceneMode(
12984 const CameraMetadata &frame_settings, uint8_t metaMode,
12985 metadata_buffer_t *hal_metadata)
12986{
12987 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080012988 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
12989
12990 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
12991 LOGD("Ignoring control mode OFF_KEEP_STATE");
12992 return NO_ERROR;
12993 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012994
12995 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
12996 camera_metadata_ro_entry entry =
12997 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
12998 if (0 == entry.count)
12999 return rc;
13000
13001 uint8_t fwk_sceneMode = entry.data.u8[0];
13002
13003 int val = lookupHalName(SCENE_MODES_MAP,
13004 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13005 fwk_sceneMode);
13006 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013007 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013008 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013009 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013010 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013011
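    // For the HDR scene mode, try sensor HDR first; when sensor HDR is not in use,
    // fall back to multi-frame HAL bracketing and program the bestshot mode.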
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013012 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13013 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13014 }
13015
13016 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13017         if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013018 cam_hdr_param_t hdr_params;
13019 hdr_params.hdr_enable = 1;
13020 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13021 hdr_params.hdr_need_1x = false;
13022 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13023 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13024 rc = BAD_VALUE;
13025 }
13026 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013027
Thierry Strudel3d639192016-09-09 11:52:26 -070013028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13029 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13030 rc = BAD_VALUE;
13031 }
13032 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013033
13034 if (mForceHdrSnapshot) {
13035 cam_hdr_param_t hdr_params;
13036 hdr_params.hdr_enable = 1;
13037 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13038 hdr_params.hdr_need_1x = false;
13039 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13040 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13041 rc = BAD_VALUE;
13042 }
13043 }
13044
Thierry Strudel3d639192016-09-09 11:52:26 -070013045 return rc;
13046}
13047
13048/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013049 * FUNCTION : setVideoHdrMode
13050 *
13051 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13052 *
13053 * PARAMETERS :
13054 * @hal_metadata: hal metadata structure
13055 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
13056 *   @vhdr         : video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13057 *
13058 * RETURN     : int32_t type of status
13059int32_t QCamera3HardwareInterface::setVideoHdrMode(
13060 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13061{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013062 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13063 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13064 }
13065
13066 LOGE("Invalid Video HDR mode %d!", vhdr);
13067 return BAD_VALUE;
13068}
13069
13070/*===========================================================================
13071 * FUNCTION : setSensorHDR
13072 *
13073 * DESCRIPTION: Enable/disable sensor HDR.
13074 *
13075 * PARAMETERS :
13076 * @hal_metadata: hal metadata structure
13077 * @enable: boolean whether to enable/disable sensor HDR
13078 *
13079 * RETURN     : int32_t type of status
13080 *==========================================================================*/
13081int32_t QCamera3HardwareInterface::setSensorHDR(
13082 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13083{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013084 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013085 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13086
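    // The concrete sensor HDR type is taken from the persist.camera.sensor.hdr
    // property and is applied below only if the capability mask reports support for it.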
13087 if (enable) {
13088 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13089 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13090 #ifdef _LE_CAMERA_
13091 //Default to staggered HDR for IOT
13092 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13093 #else
13094 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13095 #endif
13096 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13097 }
13098
13099 bool isSupported = false;
13100 switch (sensor_hdr) {
13101 case CAM_SENSOR_HDR_IN_SENSOR:
13102 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13103 CAM_QCOM_FEATURE_SENSOR_HDR) {
13104 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013105 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013106 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013107 break;
13108 case CAM_SENSOR_HDR_ZIGZAG:
13109 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13110 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13111 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013112 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013113 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013114 break;
13115 case CAM_SENSOR_HDR_STAGGERED:
13116 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13117 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13118 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013119 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013120 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013121 break;
13122 case CAM_SENSOR_HDR_OFF:
13123 isSupported = true;
13124 LOGD("Turning off sensor HDR");
13125 break;
13126 default:
13127 LOGE("HDR mode %d not supported", sensor_hdr);
13128 rc = BAD_VALUE;
13129 break;
13130 }
13131
13132 if(isSupported) {
13133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13134 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13135 rc = BAD_VALUE;
13136 } else {
13137 if(!isVideoHdrEnable)
13138 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013139 }
13140 }
13141 return rc;
13142}
13143
13144/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013145 * FUNCTION : needRotationReprocess
13146 *
13147 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13148 *
13149 * PARAMETERS : none
13150 *
13151 * RETURN : true: needed
13152 * false: no need
13153 *==========================================================================*/
13154bool QCamera3HardwareInterface::needRotationReprocess()
13155{
13156 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13157         // pp has the capability to process rotation, so do rotation in reprocess
13158 LOGH("need do reprocess for rotation");
13159 return true;
13160 }
13161
13162 return false;
13163}
13164
13165/*===========================================================================
13166 * FUNCTION : needReprocess
13167 *
13168 * DESCRIPTION: if reprocess is needed
13169 *
13170 * PARAMETERS : none
13171 *
13172 * RETURN : true: needed
13173 * false: no need
13174 *==========================================================================*/
13175bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13176{
13177 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13178 // TODO: add for ZSL HDR later
13179 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13180 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13181 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13182 return true;
13183 } else {
13184 LOGH("already post processed frame");
13185 return false;
13186 }
13187 }
13188 return needRotationReprocess();
13189}
13190
13191/*===========================================================================
13192 * FUNCTION : needJpegExifRotation
13193 *
13194 * DESCRIPTION: if rotation from jpeg is needed
13195 *
13196 * PARAMETERS : none
13197 *
13198 * RETURN : true: needed
13199 * false: no need
13200 *==========================================================================*/
13201bool QCamera3HardwareInterface::needJpegExifRotation()
13202{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013203 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013204 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13205 LOGD("Need use Jpeg EXIF Rotation");
13206 return true;
13207 }
13208 return false;
13209}
13210
13211/*===========================================================================
13212 * FUNCTION : addOfflineReprocChannel
13213 *
13214 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13215 * coming from input channel
13216 *
13217 * PARAMETERS :
13218 * @config : reprocess configuration
13219 * @inputChHandle : pointer to the input (source) channel
13220 *
13221 *
13222 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13223 *==========================================================================*/
13224QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13225 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13226{
13227 int32_t rc = NO_ERROR;
13228 QCamera3ReprocessChannel *pChannel = NULL;
13229
13230 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013231 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13232 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013233 if (NULL == pChannel) {
13234 LOGE("no mem for reprocess channel");
13235 return NULL;
13236 }
13237
13238 rc = pChannel->initialize(IS_TYPE_NONE);
13239 if (rc != NO_ERROR) {
13240 LOGE("init reprocess channel failed, ret = %d", rc);
13241 delete pChannel;
13242 return NULL;
13243 }
13244
13245 // pp feature config
13246 cam_pp_feature_config_t pp_config;
13247 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13248
13249 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13250 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13251 & CAM_QCOM_FEATURE_DSDN) {
13252         //Use CPP CDS in case h/w supports it.
13253 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13254 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13255 }
13256 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13257 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13258 }
13259
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013260 if (config.hdr_param.hdr_enable) {
13261 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13262 pp_config.hdr_param = config.hdr_param;
13263 }
13264
13265 if (mForceHdrSnapshot) {
13266 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13267 pp_config.hdr_param.hdr_enable = 1;
13268 pp_config.hdr_param.hdr_need_1x = 0;
13269 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13270 }
13271
Thierry Strudel3d639192016-09-09 11:52:26 -070013272 rc = pChannel->addReprocStreamsFromSource(pp_config,
13273 config,
13274 IS_TYPE_NONE,
13275 mMetadataChannel);
13276
13277 if (rc != NO_ERROR) {
13278 delete pChannel;
13279 return NULL;
13280 }
13281 return pChannel;
13282}
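/*
 * Illustrative usage sketch (assumption, not the actual HAL call site): a
 * processing channel that needs offline reprocessing builds a
 * reprocess_config_t from the capture request and asks the HWI for a matching
 * channel, e.g.
 *
 *   QCamera3ReprocessChannel *reprocChannel =
 *           hal->addOfflineReprocChannel(reprocConfig, srcChannel);
 *   if (reprocChannel == NULL) {
 *       LOGE("Creating offline reprocess channel failed");
 *   }
 *
 * where reprocConfig and srcChannel are placeholders for the caller's
 * reprocess configuration and source QCamera3ProcessingChannel.
 */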
13283
13284/*===========================================================================
13285 * FUNCTION : getMobicatMask
13286 *
13287 * DESCRIPTION: returns mobicat mask
13288 *
13289 * PARAMETERS : none
13290 *
13291 * RETURN : mobicat mask
13292 *
13293 *==========================================================================*/
13294uint8_t QCamera3HardwareInterface::getMobicatMask()
13295{
13296 return m_MobicatMask;
13297}
13298
13299/*===========================================================================
13300 * FUNCTION : setMobicat
13301 *
13302 * DESCRIPTION: set Mobicat on/off.
13303 *
13304 * PARAMETERS :
13305 * @params : none
13306 *
13307 * RETURN : int32_t type of status
13308 * NO_ERROR -- success
13309 *              non-zero failure code
13310 *==========================================================================*/
13311int32_t QCamera3HardwareInterface::setMobicat()
13312{
13313 char value [PROPERTY_VALUE_MAX];
13314 property_get("persist.camera.mobicat", value, "0");
13315 int32_t ret = NO_ERROR;
13316 uint8_t enableMobi = (uint8_t)atoi(value);
13317
13318 if (enableMobi) {
13319 tune_cmd_t tune_cmd;
13320 tune_cmd.type = SET_RELOAD_CHROMATIX;
13321 tune_cmd.module = MODULE_ALL;
13322 tune_cmd.value = TRUE;
13323 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13324 CAM_INTF_PARM_SET_VFE_COMMAND,
13325 tune_cmd);
13326
13327 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13328 CAM_INTF_PARM_SET_PP_COMMAND,
13329 tune_cmd);
13330 }
13331 m_MobicatMask = enableMobi;
13332
13333 return ret;
13334}
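/*
 * Illustrative note: Mobicat is controlled solely by the persist.camera.mobicat
 * property read above, e.g. on a debug build:
 *
 *   adb shell setprop persist.camera.mobicat 1
 *
 * On the next call, setMobicat() batches the SET_RELOAD_CHROMATIX tuning
 * command for both the VFE (CAM_INTF_PARM_SET_VFE_COMMAND) and PP
 * (CAM_INTF_PARM_SET_PP_COMMAND) paths and records the value in m_MobicatMask.
 */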
13335
13336/*===========================================================================
13337* FUNCTION : getLogLevel
13338*
13339* DESCRIPTION: Reads the log level property into a variable
13340*
13341* PARAMETERS :
13342* None
13343*
13344* RETURN :
13345* None
13346*==========================================================================*/
13347void QCamera3HardwareInterface::getLogLevel()
13348{
13349 char prop[PROPERTY_VALUE_MAX];
13350 uint32_t globalLogLevel = 0;
13351
13352 property_get("persist.camera.hal.debug", prop, "0");
13353 int val = atoi(prop);
13354 if (0 <= val) {
13355 gCamHal3LogLevel = (uint32_t)val;
13356 }
13357
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013358 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013359 gKpiDebugLevel = atoi(prop);
13360
13361 property_get("persist.camera.global.debug", prop, "0");
13362 val = atoi(prop);
13363 if (0 <= val) {
13364 globalLogLevel = (uint32_t)val;
13365 }
13366
13367 /* Highest log level among hal.logs and global.logs is selected */
13368 if (gCamHal3LogLevel < globalLogLevel)
13369 gCamHal3LogLevel = globalLogLevel;
13370
13371 return;
13372}
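/*
 * Illustrative note: the effective HAL log level is the maximum of
 * persist.camera.hal.debug and persist.camera.global.debug, while
 * persist.camera.kpi.debug is read independently into gKpiDebugLevel. For
 * example, on a debug build, setting hal.debug to 3 and global.debug to 1
 * leaves gCamHal3LogLevel at 3.
 */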
13373
13374/*===========================================================================
13375 * FUNCTION : validateStreamRotations
13376 *
13377 * DESCRIPTION: Check if the rotations requested are supported
13378 *
13379 * PARAMETERS :
13380 * @stream_list : streams to be configured
13381 *
13382 * RETURN : NO_ERROR on success
13383 * -EINVAL on failure
13384 *
13385 *==========================================================================*/
13386int QCamera3HardwareInterface::validateStreamRotations(
13387 camera3_stream_configuration_t *streamList)
13388{
13389 int rc = NO_ERROR;
13390
13391 /*
13392 * Loop through all streams requested in configuration
13393 * Check if unsupported rotations have been requested on any of them
13394 */
13395 for (size_t j = 0; j < streamList->num_streams; j++){
13396 camera3_stream_t *newStream = streamList->streams[j];
13397
13398 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13399 bool isImplDef = (newStream->format ==
13400 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13401 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13402 isImplDef);
13403
13404 if (isRotated && (!isImplDef || isZsl)) {
13405            LOGE("Error: Unsupported rotation of %d requested for stream "
13406 "type:%d and stream format:%d",
13407 newStream->rotation, newStream->stream_type,
13408 newStream->format);
13409 rc = -EINVAL;
13410 break;
13411 }
13412 }
13413
13414 return rc;
13415}
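/*
 * In effect, a non-zero CAMERA3_STREAM_ROTATION_* value is only accepted on
 * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED streams that are not bidirectional
 * (ZSL); requesting rotation on BLOB, YUV or ZSL streams fails the stream
 * configuration with -EINVAL.
 */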
13416
13417/*===========================================================================
13418* FUNCTION : getFlashInfo
13419*
13420* DESCRIPTION: Retrieve information about whether the device has a flash.
13421*
13422* PARAMETERS :
13423* @cameraId : Camera id to query
13424* @hasFlash : Boolean indicating whether there is a flash device
13425* associated with given camera
13426* @flashNode : If a flash device exists, this will be its device node.
13427*
13428* RETURN :
13429* None
13430*==========================================================================*/
13431void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13432 bool& hasFlash,
13433 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13434{
13435 cam_capability_t* camCapability = gCamCapability[cameraId];
13436 if (NULL == camCapability) {
13437 hasFlash = false;
13438 flashNode[0] = '\0';
13439 } else {
13440 hasFlash = camCapability->flash_available;
13441 strlcpy(flashNode,
13442 (char*)camCapability->flash_dev_name,
13443 QCAMERA_MAX_FILEPATH_LENGTH);
13444 }
13445}
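/*
 * Illustrative usage sketch (the caller shown is an assumption, e.g. the
 * torch/flash control path):
 *
 *   bool hasFlash = false;
 *   char flashNode[QCAMERA_MAX_FILEPATH_LENGTH];
 *   QCamera3HardwareInterface::getFlashInfo(cameraId, hasFlash, flashNode);
 *   if (hasFlash) {
 *       // flashNode now holds the flash device node for this camera.
 *   }
 */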
13446
13447/*===========================================================================
13448* FUNCTION : getEepromVersionInfo
13449*
13450* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13451*
13452* PARAMETERS : None
13453*
13454* RETURN : string describing EEPROM version
13455* "\0" if no such info available
13456*==========================================================================*/
13457const char *QCamera3HardwareInterface::getEepromVersionInfo()
13458{
13459 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13460}
13461
13462/*===========================================================================
13463* FUNCTION : getLdafCalib
13464*
13465* DESCRIPTION: Retrieve Laser AF calibration data
13466*
13467* PARAMETERS : None
13468*
13469* RETURN : Two uint32_t describing laser AF calibration data
13470* NULL if none is available.
13471*==========================================================================*/
13472const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13473{
13474 if (mLdafCalibExist) {
13475 return &mLdafCalib[0];
13476 } else {
13477 return NULL;
13478 }
13479}
13480
13481/*===========================================================================
13482 * FUNCTION : dynamicUpdateMetaStreamInfo
13483 *
13484 * DESCRIPTION: This function:
13485 * (1) stops all the channels
13486 * (2) returns error on pending requests and buffers
13487 * (3) sends metastream_info in setparams
13488 * (4) starts all channels
13489 * This is useful when the sensor has to be restarted to apply any
13490 * settings such as frame rate from a different sensor mode
13491 *
13492 * PARAMETERS : None
13493 *
13494 * RETURN : NO_ERROR on success
13495 * Error codes on failure
13496 *
13497 *==========================================================================*/
13498int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13499{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013500 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013501 int rc = NO_ERROR;
13502
13503 LOGD("E");
13504
13505 rc = stopAllChannels();
13506 if (rc < 0) {
13507 LOGE("stopAllChannels failed");
13508 return rc;
13509 }
13510
13511 rc = notifyErrorForPendingRequests();
13512 if (rc < 0) {
13513 LOGE("notifyErrorForPendingRequests failed");
13514 return rc;
13515 }
13516
13517 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13518 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13519                " Format:%d",
13520 mStreamConfigInfo.type[i],
13521 mStreamConfigInfo.stream_sizes[i].width,
13522 mStreamConfigInfo.stream_sizes[i].height,
13523 mStreamConfigInfo.postprocess_mask[i],
13524 mStreamConfigInfo.format[i]);
13525 }
13526
13527 /* Send meta stream info once again so that ISP can start */
13528 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13529 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13530 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13531 mParameters);
13532 if (rc < 0) {
13533        LOGE("Set meta stream info failed. Sensor mode will not change");
13534 }
13535
13536 rc = startAllChannels();
13537 if (rc < 0) {
13538 LOGE("startAllChannels failed");
13539 return rc;
13540 }
13541
13542 LOGD("X");
13543 return rc;
13544}
13545
13546/*===========================================================================
13547 * FUNCTION : stopAllChannels
13548 *
13549 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13550 *
13551 * PARAMETERS : None
13552 *
13553 * RETURN : NO_ERROR on success
13554 * Error codes on failure
13555 *
13556 *==========================================================================*/
13557int32_t QCamera3HardwareInterface::stopAllChannels()
13558{
13559 int32_t rc = NO_ERROR;
13560
13561 LOGD("Stopping all channels");
13562 // Stop the Streams/Channels
13563 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13564 it != mStreamInfo.end(); it++) {
13565 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13566 if (channel) {
13567 channel->stop();
13568 }
13569 (*it)->status = INVALID;
13570 }
13571
13572 if (mSupportChannel) {
13573 mSupportChannel->stop();
13574 }
13575 if (mAnalysisChannel) {
13576 mAnalysisChannel->stop();
13577 }
13578 if (mRawDumpChannel) {
13579 mRawDumpChannel->stop();
13580 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013581 if (mHdrPlusRawSrcChannel) {
13582 mHdrPlusRawSrcChannel->stop();
13583 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013584 if (mMetadataChannel) {
13585        /* If mStreamInfo is not empty, there is a metadata stream */
13586 mMetadataChannel->stop();
13587 }
13588
13589 LOGD("All channels stopped");
13590 return rc;
13591}
13592
13593/*===========================================================================
13594 * FUNCTION : startAllChannels
13595 *
13596 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13597 *
13598 * PARAMETERS : None
13599 *
13600 * RETURN : NO_ERROR on success
13601 * Error codes on failure
13602 *
13603 *==========================================================================*/
13604int32_t QCamera3HardwareInterface::startAllChannels()
13605{
13606 int32_t rc = NO_ERROR;
13607
13608 LOGD("Start all channels ");
13609 // Start the Streams/Channels
13610 if (mMetadataChannel) {
13611        /* If mStreamInfo is not empty, there is a metadata stream */
13612 rc = mMetadataChannel->start();
13613 if (rc < 0) {
13614 LOGE("META channel start failed");
13615 return rc;
13616 }
13617 }
13618 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13619 it != mStreamInfo.end(); it++) {
13620 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13621 if (channel) {
13622 rc = channel->start();
13623 if (rc < 0) {
13624 LOGE("channel start failed");
13625 return rc;
13626 }
13627 }
13628 }
13629 if (mAnalysisChannel) {
13630 mAnalysisChannel->start();
13631 }
13632 if (mSupportChannel) {
13633 rc = mSupportChannel->start();
13634 if (rc < 0) {
13635 LOGE("Support channel start failed");
13636 return rc;
13637 }
13638 }
13639 if (mRawDumpChannel) {
13640 rc = mRawDumpChannel->start();
13641 if (rc < 0) {
13642 LOGE("RAW dump channel start failed");
13643 return rc;
13644 }
13645 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013646 if (mHdrPlusRawSrcChannel) {
13647 rc = mHdrPlusRawSrcChannel->start();
13648 if (rc < 0) {
13649 LOGE("HDR+ RAW channel start failed");
13650 return rc;
13651 }
13652 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013653
13654 LOGD("All channels started");
13655 return rc;
13656}
13657
13658/*===========================================================================
13659 * FUNCTION : notifyErrorForPendingRequests
13660 *
13661 * DESCRIPTION: This function sends error for all the pending requests/buffers
13662 * DESCRIPTION: This function sends errors for all the pending requests/buffers
13663 * PARAMETERS : None
13664 *
13665 * RETURN : Error codes
13666 * NO_ERROR on success
13667 *
13668 *==========================================================================*/
13669int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13670{
13671 int32_t rc = NO_ERROR;
13672 unsigned int frameNum = 0;
13673 camera3_capture_result_t result;
13674 camera3_stream_buffer_t *pStream_Buf = NULL;
13675
13676 memset(&result, 0, sizeof(camera3_capture_result_t));
13677
13678 if (mPendingRequestsList.size() > 0) {
13679 pendingRequestIterator i = mPendingRequestsList.begin();
13680 frameNum = i->frame_number;
13681 } else {
13682 /* There might still be pending buffers even though there are
13683 no pending requests. Setting the frameNum to MAX so that
13684 all the buffers with smaller frame numbers are returned */
13685 frameNum = UINT_MAX;
13686 }
13687
13688 LOGH("Oldest frame num on mPendingRequestsList = %u",
13689 frameNum);
13690
Emilian Peev7650c122017-01-19 08:24:33 -080013691 notifyErrorFoPendingDepthData(mDepthChannel);
13692
Thierry Strudel3d639192016-09-09 11:52:26 -070013693 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13694 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13695
13696 if (req->frame_number < frameNum) {
13697 // Send Error notify to frameworks for each buffer for which
13698 // metadata buffer is already sent
13699 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13700 req->frame_number, req->mPendingBufferList.size());
13701
13702 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13703 if (NULL == pStream_Buf) {
13704 LOGE("No memory for pending buffers array");
13705 return NO_MEMORY;
13706 }
13707 memset(pStream_Buf, 0,
13708 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13709 result.result = NULL;
13710 result.frame_number = req->frame_number;
13711 result.num_output_buffers = req->mPendingBufferList.size();
13712 result.output_buffers = pStream_Buf;
13713
13714 size_t index = 0;
13715 for (auto info = req->mPendingBufferList.begin();
13716 info != req->mPendingBufferList.end(); ) {
13717
13718 camera3_notify_msg_t notify_msg;
13719 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13720 notify_msg.type = CAMERA3_MSG_ERROR;
13721 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13722 notify_msg.message.error.error_stream = info->stream;
13723 notify_msg.message.error.frame_number = req->frame_number;
13724 pStream_Buf[index].acquire_fence = -1;
13725 pStream_Buf[index].release_fence = -1;
13726 pStream_Buf[index].buffer = info->buffer;
13727 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13728 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013729 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013730 index++;
13731 // Remove buffer from list
13732 info = req->mPendingBufferList.erase(info);
13733 }
13734
13735 // Remove this request from Map
13736 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13737 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13738 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13739
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013740 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013741
13742 delete [] pStream_Buf;
13743 } else {
13744
13745 // Go through the pending requests info and send error request to framework
13746 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13747
13748 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13749
13750 // Send error notify to frameworks
13751 camera3_notify_msg_t notify_msg;
13752 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13753 notify_msg.type = CAMERA3_MSG_ERROR;
13754 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13755 notify_msg.message.error.error_stream = NULL;
13756 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013757 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013758
13759 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13760 if (NULL == pStream_Buf) {
13761 LOGE("No memory for pending buffers array");
13762 return NO_MEMORY;
13763 }
13764 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13765
13766 result.result = NULL;
13767 result.frame_number = req->frame_number;
13768 result.input_buffer = i->input_buffer;
13769 result.num_output_buffers = req->mPendingBufferList.size();
13770 result.output_buffers = pStream_Buf;
13771
13772 size_t index = 0;
13773 for (auto info = req->mPendingBufferList.begin();
13774 info != req->mPendingBufferList.end(); ) {
13775 pStream_Buf[index].acquire_fence = -1;
13776 pStream_Buf[index].release_fence = -1;
13777 pStream_Buf[index].buffer = info->buffer;
13778 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13779 pStream_Buf[index].stream = info->stream;
13780 index++;
13781 // Remove buffer from list
13782 info = req->mPendingBufferList.erase(info);
13783 }
13784
13785 // Remove this request from Map
13786 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13787 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13788 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13789
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013790 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013791 delete [] pStream_Buf;
13792 i = erasePendingRequest(i);
13793 }
13794 }
13795
13796 /* Reset pending frame Drop list and requests list */
13797 mPendingFrameDropList.clear();
13798
13799 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13800 req.mPendingBufferList.clear();
13801 }
13802 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013803 LOGH("Cleared all the pending buffers ");
13804
13805 return rc;
13806}
13807
13808bool QCamera3HardwareInterface::isOnEncoder(
13809 const cam_dimension_t max_viewfinder_size,
13810 uint32_t width, uint32_t height)
13811{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013812 return ((width > (uint32_t)max_viewfinder_size.width) ||
13813 (height > (uint32_t)max_viewfinder_size.height) ||
13814 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13815 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013816}
13817
13818/*===========================================================================
13819 * FUNCTION : setBundleInfo
13820 *
13821 * DESCRIPTION: Set bundle info for all streams that are bundled.
13822 *
13823 * PARAMETERS : None
13824 *
13825 * RETURN : NO_ERROR on success
13826 * Error codes on failure
13827 *==========================================================================*/
13828int32_t QCamera3HardwareInterface::setBundleInfo()
13829{
13830 int32_t rc = NO_ERROR;
13831
13832 if (mChannelHandle) {
13833 cam_bundle_config_t bundleInfo;
13834 memset(&bundleInfo, 0, sizeof(bundleInfo));
13835 rc = mCameraHandle->ops->get_bundle_info(
13836 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13837 if (rc != NO_ERROR) {
13838 LOGE("get_bundle_info failed");
13839 return rc;
13840 }
13841 if (mAnalysisChannel) {
13842 mAnalysisChannel->setBundleInfo(bundleInfo);
13843 }
13844 if (mSupportChannel) {
13845 mSupportChannel->setBundleInfo(bundleInfo);
13846 }
13847 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13848 it != mStreamInfo.end(); it++) {
13849 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13850 channel->setBundleInfo(bundleInfo);
13851 }
13852 if (mRawDumpChannel) {
13853 mRawDumpChannel->setBundleInfo(bundleInfo);
13854 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013855 if (mHdrPlusRawSrcChannel) {
13856 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13857 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013858 }
13859
13860 return rc;
13861}
13862
13863/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013864 * FUNCTION : setInstantAEC
13865 *
13866 * DESCRIPTION: Set Instant AEC related params.
13867 *
13868 * PARAMETERS :
13869 * @meta: CameraMetadata reference
13870 *
13871 * RETURN : NO_ERROR on success
13872 * Error codes on failure
13873 *==========================================================================*/
13874int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13875{
13876 int32_t rc = NO_ERROR;
13877 uint8_t val = 0;
13878 char prop[PROPERTY_VALUE_MAX];
13879
13880 // First try to configure instant AEC from framework metadata
13881 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13882 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13883 }
13884
13885 // If framework did not set this value, try to read from set prop.
13886 if (val == 0) {
13887 memset(prop, 0, sizeof(prop));
13888 property_get("persist.camera.instant.aec", prop, "0");
13889 val = (uint8_t)atoi(prop);
13890 }
13891
13892 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13893 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13894 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13895 mInstantAEC = val;
13896 mInstantAECSettledFrameNumber = 0;
13897 mInstantAecFrameIdxCount = 0;
13898 LOGH("instantAEC value set %d",val);
13899 if (mInstantAEC) {
13900 memset(prop, 0, sizeof(prop));
13901 property_get("persist.camera.ae.instant.bound", prop, "10");
13902 int32_t aec_frame_skip_cnt = atoi(prop);
13903 if (aec_frame_skip_cnt >= 0) {
13904 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13905 } else {
13906 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13907 rc = BAD_VALUE;
13908 }
13909 }
13910 } else {
13911 LOGE("Bad instant aec value set %d", val);
13912 rc = BAD_VALUE;
13913 }
13914 return rc;
13915}
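/*
 * Illustrative note: instant AEC can be requested through the
 * QCAMERA3_INSTANT_AEC_MODE vendor tag or, when the framework does not set it,
 * through the persist.camera.instant.aec property read above. Accepted values
 * lie in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX), and
 * persist.camera.ae.instant.bound bounds how many preview frames are skipped
 * while AEC settles (default 10 above).
 */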
13916
13917/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013918 * FUNCTION : get_num_overall_buffers
13919 *
13920 * DESCRIPTION: Get the total number of pending buffers across all requests.
13921 *
13922 * PARAMETERS : None
13923 *
13924 * RETURN : Number of overall pending buffers
13925 *
13926 *==========================================================================*/
13927uint32_t PendingBuffersMap::get_num_overall_buffers()
13928{
13929 uint32_t sum_buffers = 0;
13930 for (auto &req : mPendingBuffersInRequest) {
13931 sum_buffers += req.mPendingBufferList.size();
13932 }
13933 return sum_buffers;
13934}
13935
13936/*===========================================================================
13937 * FUNCTION : removeBuf
13938 *
13939 * DESCRIPTION: Remove a matching buffer from tracker.
13940 *
13941 * PARAMETERS : @buffer: image buffer for the callback
13942 *
13943 * RETURN : None
13944 *
13945 *==========================================================================*/
13946void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
13947{
13948 bool buffer_found = false;
13949 for (auto req = mPendingBuffersInRequest.begin();
13950 req != mPendingBuffersInRequest.end(); req++) {
13951 for (auto k = req->mPendingBufferList.begin();
13952 k != req->mPendingBufferList.end(); k++ ) {
13953 if (k->buffer == buffer) {
13954 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
13955 req->frame_number, buffer);
13956 k = req->mPendingBufferList.erase(k);
13957 if (req->mPendingBufferList.empty()) {
13958 // Remove this request from Map
13959 req = mPendingBuffersInRequest.erase(req);
13960 }
13961 buffer_found = true;
13962 break;
13963 }
13964 }
13965 if (buffer_found) {
13966 break;
13967 }
13968 }
13969 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
13970 get_num_overall_buffers());
13971}
13972
13973/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013974 * FUNCTION : getBufErrStatus
13975 *
13976 * DESCRIPTION: get buffer error status
13977 *
13978 * PARAMETERS : @buffer: buffer handle
13979 *
13980 * RETURN : Error status
13981 *
13982 *==========================================================================*/
13983int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
13984{
13985 for (auto& req : mPendingBuffersInRequest) {
13986 for (auto& k : req.mPendingBufferList) {
13987 if (k.buffer == buffer)
13988 return k.bufStatus;
13989 }
13990 }
13991 return CAMERA3_BUFFER_STATUS_OK;
13992}
13993
13994/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013995 * FUNCTION : setPAAFSupport
13996 *
13997 * DESCRIPTION: Set the preview-assisted auto focus support bit in
13998 * feature mask according to stream type and filter
13999 * arrangement
14000 *
14001 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14002 * @stream_type: stream type
14003 * @filter_arrangement: filter arrangement
14004 *
14005 * RETURN : None
14006 *==========================================================================*/
14007void QCamera3HardwareInterface::setPAAFSupport(
14008 cam_feature_mask_t& feature_mask,
14009 cam_stream_type_t stream_type,
14010 cam_color_filter_arrangement_t filter_arrangement)
14011{
Thierry Strudel3d639192016-09-09 11:52:26 -070014012 switch (filter_arrangement) {
14013 case CAM_FILTER_ARRANGEMENT_RGGB:
14014 case CAM_FILTER_ARRANGEMENT_GRBG:
14015 case CAM_FILTER_ARRANGEMENT_GBRG:
14016 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014017 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14018 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014019 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014020 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14021 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014022 }
14023 break;
14024 case CAM_FILTER_ARRANGEMENT_Y:
14025 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14026 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14027 }
14028 break;
14029 default:
14030 break;
14031 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014032 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14033 feature_mask, stream_type, filter_arrangement);
14034
14035
Thierry Strudel3d639192016-09-09 11:52:26 -070014036}
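/*
 * Illustrative usage sketch (assumption, simplified from stream configuration):
 *
 *   cam_feature_mask_t mask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *           gCamCapability[mCameraId]->color_arrangement);
 *   // For Bayer sensors, mask now has CAM_QCOM_FEATURE_PAAF set unless the
 *   // PPEISCORE feature was already requested on the stream.
 */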
14037
14038/*===========================================================================
14039* FUNCTION : getSensorMountAngle
14040*
14041* DESCRIPTION: Retrieve sensor mount angle
14042*
14043* PARAMETERS : None
14044*
14045* RETURN : sensor mount angle in uint32_t
14046*==========================================================================*/
14047uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14048{
14049 return gCamCapability[mCameraId]->sensor_mount_angle;
14050}
14051
14052/*===========================================================================
14053* FUNCTION : getRelatedCalibrationData
14054*
14055* DESCRIPTION: Retrieve related system calibration data
14056*
14057* PARAMETERS : None
14058*
14059* RETURN : Pointer of related system calibration data
14060*==========================================================================*/
14061const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14062{
14063 return (const cam_related_system_calibration_data_t *)
14064 &(gCamCapability[mCameraId]->related_cam_calibration);
14065}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014066
14067/*===========================================================================
14068 * FUNCTION : is60HzZone
14069 *
14070 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14071 *
14072 * PARAMETERS : None
14073 *
14074 * RETURN : True if in 60Hz zone, False otherwise
14075 *==========================================================================*/
14076bool QCamera3HardwareInterface::is60HzZone()
14077{
14078 time_t t = time(NULL);
14079 struct tm lt;
14080
14081 struct tm* r = localtime_r(&t, &lt);
14082
14083 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14084 return true;
14085 else
14086 return false;
14087}
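/*
 * Heuristic note: the UTC offset from localtime_r() is used as a rough proxy
 * for the mains frequency. Offsets strictly between -2h and +8h (roughly
 * Europe, Africa and most of Asia) are treated as 50Hz regions; everything
 * else, including the Americas, defaults to 60Hz for anti-banding purposes.
 */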
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014088
14089/*===========================================================================
14090 * FUNCTION : adjustBlackLevelForCFA
14091 *
14092 * DESCRIPTION: Adjust the black level pattern given in RGGB order to the
14093 *              order of the sensor's Bayer CFA (Color Filter Array).
14094 *
14095 * PARAMETERS : @input: black level pattern in the order of RGGB
14096 * @output: black level pattern in the order of CFA
14097 * @color_arrangement: CFA color arrangement
14098 *
14099 * RETURN : None
14100 *==========================================================================*/
14101template<typename T>
14102void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14103 T input[BLACK_LEVEL_PATTERN_CNT],
14104 T output[BLACK_LEVEL_PATTERN_CNT],
14105 cam_color_filter_arrangement_t color_arrangement)
14106{
14107 switch (color_arrangement) {
14108 case CAM_FILTER_ARRANGEMENT_GRBG:
14109 output[0] = input[1];
14110 output[1] = input[0];
14111 output[2] = input[3];
14112 output[3] = input[2];
14113 break;
14114 case CAM_FILTER_ARRANGEMENT_GBRG:
14115 output[0] = input[2];
14116 output[1] = input[3];
14117 output[2] = input[0];
14118 output[3] = input[1];
14119 break;
14120 case CAM_FILTER_ARRANGEMENT_BGGR:
14121 output[0] = input[3];
14122 output[1] = input[2];
14123 output[2] = input[1];
14124 output[3] = input[0];
14125 break;
14126 case CAM_FILTER_ARRANGEMENT_RGGB:
14127 output[0] = input[0];
14128 output[1] = input[1];
14129 output[2] = input[2];
14130 output[3] = input[3];
14131 break;
14132 default:
14133 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14134 break;
14135 }
14136}
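/*
 * Illustrative usage sketch (assumption; the real callers pass the dynamic
 * black level reported by the ISP):
 *
 *   float blackLevelRggb[BLACK_LEVEL_PATTERN_CNT];  // in R, Gr, Gb, B order
 *   float blackLevelCfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(blackLevelRggb, blackLevelCfa,
 *           gCamCapability[mCameraId]->color_arrangement);
 *   // blackLevelCfa now matches the sensor's CFA channel ordering.
 */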
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014137
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014138void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14139 CameraMetadata &resultMetadata,
14140 std::shared_ptr<metadata_buffer_t> settings)
14141{
14142 if (settings == nullptr) {
14143 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14144 return;
14145 }
14146
14147 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14148 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14149 }
14150
14151 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14152 String8 str((const char *)gps_methods);
14153 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14154 }
14155
14156 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14157 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14158 }
14159
14160 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14161 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14162 }
14163
14164 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14165 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14166 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14167 }
14168
14169 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14170 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14171 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14172 }
14173
14174 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14175 int32_t fwk_thumb_size[2];
14176 fwk_thumb_size[0] = thumb_size->width;
14177 fwk_thumb_size[1] = thumb_size->height;
14178 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14179 }
14180
14181 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14182 uint8_t fwk_intent = intent[0];
14183 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14184 }
14185}
14186
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014187bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14188 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14189 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014190{
14191 if (hdrPlusRequest == nullptr) return false;
14192
14193 // Check noise reduction mode is high quality.
14194 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14195 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14196 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014197        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14198                metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ? metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] : -1);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014199 return false;
14200 }
14201
14202 // Check edge mode is high quality.
14203 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14204 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14205 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14206 return false;
14207 }
14208
14209 if (request.num_output_buffers != 1 ||
14210 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14211 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014212 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14213 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14214                    request.output_buffers[i].stream->width,
14215                    request.output_buffers[i].stream->height,
14216                    request.output_buffers[i].stream->format);
14217 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014218 return false;
14219 }
14220
14221 // Get a YUV buffer from pic channel.
14222 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14223 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14224 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14225 if (res != OK) {
14226 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14227 __FUNCTION__, strerror(-res), res);
14228 return false;
14229 }
14230
14231 pbcamera::StreamBuffer buffer;
14232 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014233 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014234 buffer.data = yuvBuffer->buffer;
14235 buffer.dataSize = yuvBuffer->frame_len;
14236
14237 pbcamera::CaptureRequest pbRequest;
14238 pbRequest.id = request.frame_number;
14239 pbRequest.outputBuffers.push_back(buffer);
14240
14241 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014242 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014243 if (res != OK) {
14244 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14245 strerror(-res), res);
14246 return false;
14247 }
14248
14249 hdrPlusRequest->yuvBuffer = yuvBuffer;
14250 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14251
14252 return true;
14253}
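/*
 * Illustrative caller flow (assumption, simplified from the capture request
 * path): an HDR+-eligible still capture is first offered to the HDR+ service
 * and only falls back to the regular capture path if submission fails:
 *
 *   HdrPlusPendingRequest pendingHdrPlusRequest;
 *   if (trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta)) {
 *       Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
 *       mHdrPlusPendingRequests.emplace(request->frame_number, pendingHdrPlusRequest);
 *   } else {
 *       // Submit through the normal (non-HDR+) capture request path.
 *   }
 */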
14254
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014255status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14256 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14257 return OK;
14258 }
14259
14260 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14261 if (res != OK) {
14262 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14263 strerror(-res), res);
14264 return res;
14265 }
14266 gHdrPlusClientOpening = true;
14267
14268 return OK;
14269}
14270
Chien-Yu Chenee335912017-02-09 17:53:20 -080014271status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14272{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014273 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014274
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014275 // Check if gHdrPlusClient is opened or being opened.
14276 if (gHdrPlusClient == nullptr) {
14277 if (gHdrPlusClientOpening) {
14278 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14279 return OK;
14280 }
14281
14282 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014283 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014284 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14285 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014286 return res;
14287 }
14288
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014289 // When opening HDR+ client completes, HDR+ mode will be enabled.
14290 return OK;
14291
Chien-Yu Chenee335912017-02-09 17:53:20 -080014292 }
14293
14294 // Configure stream for HDR+.
14295 res = configureHdrPlusStreamsLocked();
14296 if (res != OK) {
14297 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014298 return res;
14299 }
14300
14301 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14302 res = gHdrPlusClient->setZslHdrPlusMode(true);
14303 if (res != OK) {
14304 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014305 return res;
14306 }
14307
14308 mHdrPlusModeEnabled = true;
14309 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14310
14311 return OK;
14312}
14313
14314void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14315{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014316 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014317 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014318 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14319 if (res != OK) {
14320 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14321 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014322 }
14323
14324 mHdrPlusModeEnabled = false;
14325 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14326}
14327
14328status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014329{
14330 pbcamera::InputConfiguration inputConfig;
14331 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14332 status_t res = OK;
14333
14334 // Configure HDR+ client streams.
14335 // Get input config.
14336 if (mHdrPlusRawSrcChannel) {
14337 // HDR+ input buffers will be provided by HAL.
14338 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14339 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14340 if (res != OK) {
14341 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14342 __FUNCTION__, strerror(-res), res);
14343 return res;
14344 }
14345
14346 inputConfig.isSensorInput = false;
14347 } else {
14348 // Sensor MIPI will send data to Easel.
14349 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014350 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014351 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14352 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14353 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14354 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14355 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14356 if (mSensorModeInfo.num_raw_bits != 10) {
14357 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14358 mSensorModeInfo.num_raw_bits);
14359 return BAD_VALUE;
14360 }
14361
14362 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014363 }
14364
14365 // Get output configurations.
14366 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014367 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014368
14369 // Easel may need to output YUV output buffers if mPictureChannel was created.
14370 pbcamera::StreamConfiguration yuvOutputConfig;
14371 if (mPictureChannel != nullptr) {
14372 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14373 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14374 if (res != OK) {
14375 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14376 __FUNCTION__, strerror(-res), res);
14377
14378 return res;
14379 }
14380
14381 outputStreamConfigs.push_back(yuvOutputConfig);
14382 }
14383
14384 // TODO: consider other channels for YUV output buffers.
14385
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014386 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014387 if (res != OK) {
14388        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14389 strerror(-res), res);
14390 return res;
14391 }
14392
14393 return OK;
14394}
14395
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014396void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14397 if (client == nullptr) {
14398 ALOGE("%s: Opened client is null.", __FUNCTION__);
14399 return;
14400 }
14401
14402 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14403
14404 Mutex::Autolock l(gHdrPlusClientLock);
14405 gHdrPlusClient = std::move(client);
14406 gHdrPlusClientOpening = false;
14407
14408 // Set static metadata.
14409 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14410 if (res != OK) {
14411 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14412 __FUNCTION__, strerror(-res), res);
14413 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14414 gHdrPlusClient = nullptr;
14415 return;
14416 }
14417
14418 // Enable HDR+ mode.
14419 res = enableHdrPlusModeLocked();
14420 if (res != OK) {
14421 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14422 }
14423}
14424
14425void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14426 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14427 Mutex::Autolock l(gHdrPlusClientLock);
14428 gHdrPlusClientOpening = false;
14429}
14430
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014431void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14432 const camera_metadata_t &resultMetadata) {
14433 if (result != nullptr) {
14434 if (result->outputBuffers.size() != 1) {
14435 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14436 result->outputBuffers.size());
14437 return;
14438 }
14439
14440 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14441 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14442 result->outputBuffers[0].streamId);
14443 return;
14444 }
14445
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014446 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014447 HdrPlusPendingRequest pendingRequest;
14448 {
14449 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14450 auto req = mHdrPlusPendingRequests.find(result->requestId);
14451 pendingRequest = req->second;
14452 }
14453
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014454 // Update the result metadata with the settings of the HDR+ still capture request because
14455 // the result metadata belongs to a ZSL buffer.
14456 CameraMetadata metadata;
14457 metadata = &resultMetadata;
14458 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14459 camera_metadata_t* updatedResultMetadata = metadata.release();
14460
14461 QCamera3PicChannel *picChannel =
14462 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14463
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014464 // Check if dumping HDR+ YUV output is enabled.
14465 char prop[PROPERTY_VALUE_MAX];
14466 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14467 bool dumpYuvOutput = atoi(prop);
14468
14469 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014470 // Dump yuv buffer to a ppm file.
14471 pbcamera::StreamConfiguration outputConfig;
14472 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14473 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14474 if (rc == OK) {
14475 char buf[FILENAME_MAX] = {};
14476 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14477 result->requestId, result->outputBuffers[0].streamId,
14478 outputConfig.image.width, outputConfig.image.height);
14479
14480 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14481 } else {
14482 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14483 __FUNCTION__, strerror(-rc), rc);
14484 }
14485 }
14486
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014487 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14488 auto halMetadata = std::make_shared<metadata_buffer_t>();
14489 clear_metadata_buffer(halMetadata.get());
14490
14491 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14492 // encoding.
14493 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14494 halStreamId, /*minFrameDuration*/0);
14495 if (res == OK) {
14496 // Return the buffer to pic channel for encoding.
14497 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14498 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14499 halMetadata);
14500 } else {
14501 // Return the buffer without encoding.
14502 // TODO: This should not happen but we may want to report an error buffer to camera
14503 // service.
14504 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14505 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14506 strerror(-res), res);
14507 }
14508
14509 // Send HDR+ metadata to framework.
14510 {
14511 pthread_mutex_lock(&mMutex);
14512
14513 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14514 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14515 pthread_mutex_unlock(&mMutex);
14516 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014517
14518 // Remove the HDR+ pending request.
14519 {
14520 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14521 auto req = mHdrPlusPendingRequests.find(result->requestId);
14522 mHdrPlusPendingRequests.erase(req);
14523 }
14524 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014525}
14526
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014527void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14528 // TODO: Handle HDR+ capture failures and send the failure to framework.
14529 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14530 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14531
14532 // Return the buffer to pic channel.
14533 QCamera3PicChannel *picChannel =
14534 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14535 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14536
14537 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014538}
14539
Thierry Strudel3d639192016-09-09 11:52:26 -070014540}; //end namespace qcamera