blob: 223ea608db94771fc00c396e9306dfc2e1537389 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
using namespace android;

namespace qcamera {

// Convenience accessor: mapped pointer of a memory object at a given index.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline depth / partial-result tuning for the camera3 framework contract.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum pixel values for 8/10/12-bit sensor output.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS is considered supported.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream count limits.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
// Element count of a statically-sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature superset applied to HAL3 streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Per-sensor capability and static-metadata tables, indexed by camera id.
// NOTE(review): presumably filled in during module/camera initialization
// outside this chunk — confirm against the module layer.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL3 log verbosity; read by the LOG* macros, refreshed via getLogLevel().
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;
130
// CDS (chroma down-sampling) property string -> HAL mode.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Vendor video-HDR metadata enum -> HAL video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor IR metadata enum -> HAL IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

// ANDROID_CONTROL_EFFECT_MODE_* -> HAL effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> HAL white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// ANDROID_CONTROL_SCENE_MODE_* -> HAL scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

// ANDROID_CONTROL_AF_MODE_* -> HAL focus mode.
// NOTE: AF_MODE_OFF appears twice (OFF and FIXED both report as OFF); the
// first match wins when mapping HAL -> Android, so ordering matters here.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Color-aberration (CAC) correction mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// AE antibanding (flicker) mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash behavior (both plain ON and OFF disable flash).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// ANDROID_FLASH_MODE_* -> HAL flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};
251
// Face-detect statistics mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

// Lens focus-distance calibration quality mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// Lens motion state mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

// JPEG thumbnail sizes advertised in static metadata, as flat
// (width, height) pairs; (0, 0) means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// Sensor test-pattern mode mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested HFR frame rate -> HAL HFR mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC metadata enum -> HAL AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};
// camera3_device_ops vtable handed to the camera framework. Entries this
// HAL does not implement (register_stream_buffers, get_metadata_vendor_tag_ops)
// are deliberately NULL per the camera3 HAL contract.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Per-camera session ids used for dual-camera linking; initialise to some
// default (sentinel) value so an unset slot is recognizable.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
357
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members, wires up the camera3_device_t the framework sees,
 *              reads debug/tuning system properties, and probes the GPU
 *              library for surface stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework callback table (stored, invoked later)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the camera3_device_t the framework will call through.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) debug toggles for preview/video.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the Adreno GPU utils library (if present) for the pixel
    // alignment the display expects; fall back to 32-pixel padding.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
504
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown order is
 *              significant: unlink dual-camera, stop ALL streams, then
 *              delete channels, send the final unconfigure, delete the
 *              channel bundle, close the camera, and free pending state.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the shared sessionId[] table.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best-effort: unlink failure must not block camera close.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: everything is stopped, now it is safe to delete.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned through mStreamInfo (deleted above); only
    // clear the alias here.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any buffers/requests still tracked for in-flight captures.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
673
674/*===========================================================================
675 * FUNCTION : erasePendingRequest
676 *
677 * DESCRIPTION: function to erase a desired pending request after freeing any
678 * allocated memory
679 *
680 * PARAMETERS :
681 * @i : iterator pointing to pending request to be erased
682 *
683 * RETURN : iterator pointing to the next request
684 *==========================================================================*/
685QCamera3HardwareInterface::pendingRequestIterator
686 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
687{
688 if (i->input_buffer != NULL) {
689 free(i->input_buffer);
690 i->input_buffer = NULL;
691 }
692 if (i->settings != NULL)
693 free_camera_metadata((camera_metadata_t*)i->settings);
694 return mPendingRequestsList.erase(i);
695}
696
697/*===========================================================================
698 * FUNCTION : camEvtHandle
699 *
700 * DESCRIPTION: Function registered to mm-camera-interface to handle events
701 *
702 * PARAMETERS :
703 * @camera_handle : interface layer camera handle
704 * @evt : ptr to event
705 * @user_data : user data ptr
706 *
707 * RETURN : none
708 *==========================================================================*/
709void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
710 mm_camera_event_t *evt,
711 void *user_data)
712{
713 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
714 if (obj && evt) {
715 switch(evt->server_event_type) {
716 case CAM_EVENT_TYPE_DAEMON_DIED:
717 pthread_mutex_lock(&obj->mMutex);
718 obj->mState = ERROR;
719 pthread_mutex_unlock(&obj->mMutex);
720 LOGE("Fatal, camera daemon died");
721 break;
722
723 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
724 LOGD("HAL got request pull from Daemon");
725 pthread_mutex_lock(&obj->mMutex);
726 obj->mWokenUpByDaemon = true;
727 obj->unblockRequestIfNecessary();
728 pthread_mutex_unlock(&obj->mMutex);
729 break;
730
731 default:
732 LOGW("Warning: Unhandled event %d",
733 evt->server_event_type);
734 break;
735 }
736 } else {
737 LOGE("NULL user_data/evt");
738 }
739}
740
741/*===========================================================================
742 * FUNCTION : openCamera
743 *
744 * DESCRIPTION: open camera
745 *
746 * PARAMETERS :
747 * @hw_device : double ptr for camera device struct
748 *
749 * RETURN : int32_t type of status
750 * NO_ERROR -- success
751 * none-zero failure code
752 *==========================================================================*/
753int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
754{
755 int rc = 0;
756 if (mState != CLOSED) {
757 *hw_device = NULL;
758 return PERMISSION_DENIED;
759 }
760
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800761 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700762 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
763 mCameraId);
764
765 rc = openCamera();
766 if (rc == 0) {
767 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800768 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700769 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800770 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700771
Thierry Strudel3d639192016-09-09 11:52:26 -0700772 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
773 mCameraId, rc);
774
775 if (rc == NO_ERROR) {
776 mState = OPENED;
777 }
778 return rc;
779}
780
781/*===========================================================================
782 * FUNCTION : openCamera
783 *
784 * DESCRIPTION: open camera
785 *
786 * PARAMETERS : none
787 *
788 * RETURN : int32_t type of status
789 * NO_ERROR -- success
790 * none-zero failure code
791 *==========================================================================*/
792int QCamera3HardwareInterface::openCamera()
793{
794 int rc = 0;
795 char value[PROPERTY_VALUE_MAX];
796
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800797 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700798 if (mCameraHandle) {
799 LOGE("Failure: Camera already opened");
800 return ALREADY_EXISTS;
801 }
802
803 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
804 if (rc < 0) {
805 LOGE("Failed to reserve flash for camera id: %d",
806 mCameraId);
807 return UNKNOWN_ERROR;
808 }
809
810 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
811 if (rc) {
812 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
813 return rc;
814 }
815
816 if (!mCameraHandle) {
817 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
818 return -ENODEV;
819 }
820
821 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
822 camEvtHandle, (void *)this);
823
824 if (rc < 0) {
825 LOGE("Error, failed to register event callback");
826 /* Not closing camera here since it is already handled in destructor */
827 return FAILED_TRANSACTION;
828 }
829
830 mExifParams.debug_params =
831 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
832 if (mExifParams.debug_params) {
833 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
834 } else {
835 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
836 return NO_MEMORY;
837 }
838 mFirstConfiguration = true;
839
840 //Notify display HAL that a camera session is active.
841 //But avoid calling the same during bootup because camera service might open/close
842 //cameras at boot time during its initialization and display service will also internally
843 //wait for camera service to initialize first while calling this display API, resulting in a
844 //deadlock situation. Since boot time camera open/close calls are made only to fetch
845 //capabilities, no need of this display bw optimization.
846 //Use "service.bootanim.exit" property to know boot status.
847 property_get("service.bootanim.exit", value, "0");
848 if (atoi(value) == 1) {
849 pthread_mutex_lock(&gCamLock);
850 if (gNumCameraSessions++ == 0) {
851 setCameraLaunchStatus(true);
852 }
853 pthread_mutex_unlock(&gCamLock);
854 }
855
856 //fill the session id needed while linking dual cam
857 pthread_mutex_lock(&gCamLock);
858 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
859 &sessionId[mCameraId]);
860 pthread_mutex_unlock(&gCamLock);
861
862 if (rc < 0) {
863 LOGE("Error, failed to get sessiion id");
864 return UNKNOWN_ERROR;
865 } else {
866 //Allocate related cam sync buffer
867 //this is needed for the payload that goes along with bundling cmd for related
868 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700869 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
870 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700871 if(rc != OK) {
872 rc = NO_MEMORY;
873 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
874 return NO_MEMORY;
875 }
876
877 //Map memory for related cam sync buffer
878 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700879 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
880 m_pDualCamCmdHeap->getFd(0),
881 sizeof(cam_dual_camera_cmd_info_t),
882 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700883 if(rc < 0) {
884 LOGE("Dualcam: failed to map Related cam sync buffer");
885 rc = FAILED_TRANSACTION;
886 return NO_MEMORY;
887 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700888 m_pDualCamCmdPtr =
889 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700890 }
891
892 LOGH("mCameraId=%d",mCameraId);
893
894 return NO_ERROR;
895}
896
897/*===========================================================================
898 * FUNCTION : closeCamera
899 *
900 * DESCRIPTION: close camera
901 *
902 * PARAMETERS : none
903 *
904 * RETURN : int32_t type of status
905 * NO_ERROR -- success
906 * none-zero failure code
907 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // unmap memory for related cam sync buffer
    // NOTE(review): mCameraHandle is dereferenced here without a NULL check;
    // the HAL state machine presumably guarantees closeCamera() only runs
    // after a successful openCamera() — confirm.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    // Release the dual-camera command heap allocated in openCamera().
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Disconnect from HDR+ client.
    if (mHdrPlusClient != nullptr) {
        mHdrPlusClient->disconnect();
        mHdrPlusClient = nullptr;
    }

    // Reset session id to a recognizable invalid value; gCamLock guards the
    // sessionId table shared across camera instances.
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug exif params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Best-effort flash release; failure is logged but does not fail close.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
             mCameraId, rc);
    // rc reflects only close_camera(); later cleanup failures are not reported.
    return rc;
}
966
967/*===========================================================================
968 * FUNCTION : initialize
969 *
970 * DESCRIPTION: Initialize frameworks callback functions
971 *
972 * PARAMETERS :
973 * @callback_ops : callback function to frameworks
974 *
975 * RETURN :
976 *
977 *==========================================================================*/
978int QCamera3HardwareInterface::initialize(
979 const struct camera3_callback_ops *callback_ops)
980{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800981 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700982 int rc;
983
984 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
985 pthread_mutex_lock(&mMutex);
986
987 // Validate current state
988 switch (mState) {
989 case OPENED:
990 /* valid state */
991 break;
992 default:
993 LOGE("Invalid state %d", mState);
994 rc = -ENODEV;
995 goto err1;
996 }
997
998 rc = initParameters();
999 if (rc < 0) {
1000 LOGE("initParamters failed %d", rc);
1001 goto err1;
1002 }
1003 mCallbackOps = callback_ops;
1004
1005 mChannelHandle = mCameraHandle->ops->add_channel(
1006 mCameraHandle->camera_handle, NULL, NULL, this);
1007 if (mChannelHandle == 0) {
1008 LOGE("add_channel failed");
1009 rc = -ENOMEM;
1010 pthread_mutex_unlock(&mMutex);
1011 return rc;
1012 }
1013
1014 pthread_mutex_unlock(&mMutex);
1015 mCameraInitialized = true;
1016 mState = INITIALIZED;
1017 LOGI("X");
1018 return 0;
1019
1020err1:
1021 pthread_mutex_unlock(&mMutex);
1022 return rc;
1023}
1024
1025/*===========================================================================
1026 * FUNCTION : validateStreamDimensions
1027 *
1028 * DESCRIPTION: Check if the configuration requested are those advertised
1029 *
1030 * PARAMETERS :
1031 * @stream_list : streams to be configured
1032 *
1033 * RETURN :
1034 *
1035 *==========================================================================*/
1036int QCamera3HardwareInterface::validateStreamDimensions(
1037 camera3_stream_configuration_t *streamList)
1038{
1039 int rc = NO_ERROR;
1040 size_t count = 0;
1041
1042 camera3_stream_t *inputStream = NULL;
1043 /*
1044 * Loop through all streams to find input stream if it exists*
1045 */
1046 for (size_t i = 0; i< streamList->num_streams; i++) {
1047 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1048 if (inputStream != NULL) {
1049 LOGE("Error, Multiple input streams requested");
1050 return -EINVAL;
1051 }
1052 inputStream = streamList->streams[i];
1053 }
1054 }
1055 /*
1056 * Loop through all streams requested in configuration
1057 * Check if unsupported sizes have been requested on any of them
1058 */
1059 for (size_t j = 0; j < streamList->num_streams; j++) {
1060 bool sizeFound = false;
1061 camera3_stream_t *newStream = streamList->streams[j];
1062
1063 uint32_t rotatedHeight = newStream->height;
1064 uint32_t rotatedWidth = newStream->width;
1065 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1066 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1067 rotatedHeight = newStream->width;
1068 rotatedWidth = newStream->height;
1069 }
1070
1071 /*
1072 * Sizes are different for each type of stream format check against
1073 * appropriate table.
1074 */
1075 switch (newStream->format) {
1076 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1077 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1078 case HAL_PIXEL_FORMAT_RAW10:
1079 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1080 for (size_t i = 0; i < count; i++) {
1081 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1082 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1083 sizeFound = true;
1084 break;
1085 }
1086 }
1087 break;
1088 case HAL_PIXEL_FORMAT_BLOB:
1089 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1090 /* Verify set size against generated sizes table */
1091 for (size_t i = 0; i < count; i++) {
1092 if (((int32_t)rotatedWidth ==
1093 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1094 ((int32_t)rotatedHeight ==
1095 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1096 sizeFound = true;
1097 break;
1098 }
1099 }
1100 break;
1101 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1102 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1103 default:
1104 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1105 || newStream->stream_type == CAMERA3_STREAM_INPUT
1106 || IS_USAGE_ZSL(newStream->usage)) {
1107 if (((int32_t)rotatedWidth ==
1108 gCamCapability[mCameraId]->active_array_size.width) &&
1109 ((int32_t)rotatedHeight ==
1110 gCamCapability[mCameraId]->active_array_size.height)) {
1111 sizeFound = true;
1112 break;
1113 }
1114 /* We could potentially break here to enforce ZSL stream
1115 * set from frameworks always is full active array size
1116 * but it is not clear from the spc if framework will always
1117 * follow that, also we have logic to override to full array
1118 * size, so keeping the logic lenient at the moment
1119 */
1120 }
1121 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1122 MAX_SIZES_CNT);
1123 for (size_t i = 0; i < count; i++) {
1124 if (((int32_t)rotatedWidth ==
1125 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1126 ((int32_t)rotatedHeight ==
1127 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1128 sizeFound = true;
1129 break;
1130 }
1131 }
1132 break;
1133 } /* End of switch(newStream->format) */
1134
1135 /* We error out even if a single stream has unsupported size set */
1136 if (!sizeFound) {
1137 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1138 rotatedWidth, rotatedHeight, newStream->format,
1139 gCamCapability[mCameraId]->active_array_size.width,
1140 gCamCapability[mCameraId]->active_array_size.height);
1141 rc = -EINVAL;
1142 break;
1143 }
1144 } /* End of for each stream */
1145 return rc;
1146}
1147
1148/*==============================================================================
1149 * FUNCTION : isSupportChannelNeeded
1150 *
1151 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1152 *
1153 * PARAMETERS :
1154 * @stream_list : streams to be configured
1155 * @stream_config_info : the config info for streams to be configured
1156 *
1157 * RETURN : Boolen true/false decision
1158 *
1159 *==========================================================================*/
1160bool QCamera3HardwareInterface::isSupportChannelNeeded(
1161 camera3_stream_configuration_t *streamList,
1162 cam_stream_size_info_t stream_config_info)
1163{
1164 uint32_t i;
1165 bool pprocRequested = false;
1166 /* Check for conditions where PProc pipeline does not have any streams*/
1167 for (i = 0; i < stream_config_info.num_streams; i++) {
1168 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1169 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1170 pprocRequested = true;
1171 break;
1172 }
1173 }
1174
1175 if (pprocRequested == false )
1176 return true;
1177
1178 /* Dummy stream needed if only raw or jpeg streams present */
1179 for (i = 0; i < streamList->num_streams; i++) {
1180 switch(streamList->streams[i]->format) {
1181 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1182 case HAL_PIXEL_FORMAT_RAW10:
1183 case HAL_PIXEL_FORMAT_RAW16:
1184 case HAL_PIXEL_FORMAT_BLOB:
1185 break;
1186 default:
1187 return false;
1188 }
1189 }
1190 return true;
1191}
1192
1193/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001194 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001195 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001196 * DESCRIPTION: Get sensor mode information based on current stream configuratoin
Thierry Strudel3d639192016-09-09 11:52:26 -07001197 *
1198 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001199 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001200 *
1201 * RETURN : int32_t type of status
1202 * NO_ERROR -- success
1203 * none-zero failure code
1204 *
1205 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // Find the max width and max height over all configured streams; the
    // sensor mode chosen by the backend must cover every output size.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // mParameters is a reusable parameter batch; clear stale entries first.
    clear_metadata_buffer(mParameters);

    // Push the required max dimension so the backend can select a mode.
    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Reuse the batch buffer to query the selected sensor mode back.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    // Copy the mode info out of the batch into the caller-provided struct.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u", __FUNCTION__,
        sensorModeInfo.active_array_size.width, sensorModeInfo.active_array_size.height,
        sensorModeInfo.pixel_array_size.width, sensorModeInfo.pixel_array_size.height,
        sensorModeInfo.op_pixel_clk);

    return rc;
}
1251
1252/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001253 * FUNCTION : addToPPFeatureMask
1254 *
1255 * DESCRIPTION: add additional features to pp feature mask based on
1256 * stream type and usecase
1257 *
1258 * PARAMETERS :
1259 * @stream_format : stream type for feature mask
1260 * @stream_idx : stream idx within postprocess_mask list to change
1261 *
1262 * RETURN : NULL
1263 *
1264 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept either a hex ("0x...") or a decimal property value.
    // NOTE(review): sscanf writes via %llx/%lld into a cam_feature_mask_t —
    // assumes that typedef is a 64-bit (long long compatible) type; confirm.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparsable property: leave the stream's mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor advertises it.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
        mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1319
1320/*==============================================================================
1321 * FUNCTION : updateFpsInPreviewBuffer
1322 *
1323 * DESCRIPTION: update FPS information in preview buffer.
1324 *
1325 * PARAMETERS :
1326 * @metadata : pointer to metadata buffer
1327 * @frame_number: frame_number to look for in pending buffer list
1328 *
1329 * RETURN : None
1330 *
1331 *==========================================================================*/
1332void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1333 uint32_t frame_number)
1334{
1335 // Mark all pending buffers for this particular request
1336 // with corresponding framerate information
1337 for (List<PendingBuffersInRequest>::iterator req =
1338 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1339 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1340 for(List<PendingBufferInfo>::iterator j =
1341 req->mPendingBufferList.begin();
1342 j != req->mPendingBufferList.end(); j++) {
1343 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1344 if ((req->frame_number == frame_number) &&
1345 (channel->getStreamTypeMask() &
1346 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1347 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1348 CAM_INTF_PARM_FPS_RANGE, metadata) {
1349 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1350 struct private_handle_t *priv_handle =
1351 (struct private_handle_t *)(*(j->buffer));
1352 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1353 }
1354 }
1355 }
1356 }
1357}
1358
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001359/*==============================================================================
1360 * FUNCTION : updateTimeStampInPendingBuffers
1361 *
1362 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1363 * of a frame number
1364 *
1365 * PARAMETERS :
1366 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1367 * @timestamp : timestamp to be set
1368 *
1369 * RETURN : None
1370 *
1371 *==========================================================================*/
1372void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1373 uint32_t frameNumber, nsecs_t timestamp)
1374{
1375 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1376 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1377 if (req->frame_number != frameNumber)
1378 continue;
1379
1380 for (auto k = req->mPendingBufferList.begin();
1381 k != req->mPendingBufferList.end(); k++ ) {
1382 struct private_handle_t *priv_handle =
1383 (struct private_handle_t *) (*(k->buffer));
1384 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1385 }
1386 }
1387 return;
1388}
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*===========================================================================
1391 * FUNCTION : configureStreams
1392 *
1393 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1394 * and output streams.
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 *
1399 * RETURN :
1400 *
1401 *==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
        camera3_stream_configuration_t *streamList)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
    int rc = 0;

    // Acquire perfLock before configure streams; the heavy lifting happens
    // in configureStreamsPerfLocked(), and the lock is released regardless
    // of its result.
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
    rc = configureStreamsPerfLocked(streamList);
    mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);

    return rc;
}
1415
1416/*===========================================================================
1417 * FUNCTION : configureStreamsPerfLocked
1418 *
1419 * DESCRIPTION: configureStreams while perfLock is held.
1420 *
1421 * PARAMETERS :
1422 * @stream_list : streams to be configured
1423 *
1424 * RETURN : int32_t type of status
1425 * NO_ERROR -- success
1426 * none-zero failure code
1427 *==========================================================================*/
1428int QCamera3HardwareInterface::configureStreamsPerfLocked(
1429 camera3_stream_configuration_t *streamList)
1430{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001431 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001432 int rc = 0;
1433
1434 // Sanity check stream_list
1435 if (streamList == NULL) {
1436 LOGE("NULL stream configuration");
1437 return BAD_VALUE;
1438 }
1439 if (streamList->streams == NULL) {
1440 LOGE("NULL stream list");
1441 return BAD_VALUE;
1442 }
1443
1444 if (streamList->num_streams < 1) {
1445 LOGE("Bad number of streams requested: %d",
1446 streamList->num_streams);
1447 return BAD_VALUE;
1448 }
1449
1450 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1451 LOGE("Maximum number of streams %d exceeded: %d",
1452 MAX_NUM_STREAMS, streamList->num_streams);
1453 return BAD_VALUE;
1454 }
1455
1456 mOpMode = streamList->operation_mode;
1457 LOGD("mOpMode: %d", mOpMode);
1458
1459 /* first invalidate all the steams in the mStreamList
1460 * if they appear again, they will be validated */
1461 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1462 it != mStreamInfo.end(); it++) {
1463 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1464 if (channel) {
1465 channel->stop();
1466 }
1467 (*it)->status = INVALID;
1468 }
1469
1470 if (mRawDumpChannel) {
1471 mRawDumpChannel->stop();
1472 delete mRawDumpChannel;
1473 mRawDumpChannel = NULL;
1474 }
1475
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 if (mHdrPlusRawSrcChannel) {
1477 mHdrPlusRawSrcChannel->stop();
1478 delete mHdrPlusRawSrcChannel;
1479 mHdrPlusRawSrcChannel = NULL;
1480 }
1481
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 if (mSupportChannel)
1483 mSupportChannel->stop();
1484
1485 if (mAnalysisChannel) {
1486 mAnalysisChannel->stop();
1487 }
1488 if (mMetadataChannel) {
1489 /* If content of mStreamInfo is not 0, there is metadata stream */
1490 mMetadataChannel->stop();
1491 }
1492 if (mChannelHandle) {
1493 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1494 mChannelHandle);
1495 LOGD("stopping channel %d", mChannelHandle);
1496 }
1497
1498 pthread_mutex_lock(&mMutex);
1499
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001500 // Check if HDR+ is enabled.
1501 char prop[PROPERTY_VALUE_MAX];
1502 property_get("persist.camera.hdrplus", prop, "0");
1503 bool enableHdrPlus = atoi(prop);
1504 if (enableHdrPlus) {
1505 ALOGD("%s: HDR+ in Camera HAL enabled.", __FUNCTION__);
1506 // Connect to HDR+ client if not yet.
1507 if (mHdrPlusClient == nullptr) {
1508 mHdrPlusClient = std::make_shared<HdrPlusClient>();
1509 rc = mHdrPlusClient->connect(this);
1510 if (rc < 0) {
1511 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
1512 strerror(-rc), rc);
1513 pthread_mutex_unlock(&mMutex);
1514 return -ENODEV;
1515 }
1516
1517 // Set static metadata.
1518 rc = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
1519 if (rc < 0) {
1520 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
1521 strerror(-rc), rc);
1522 pthread_mutex_unlock(&mMutex);
1523 return -ENODEV;
1524 }
1525 }
1526 } else {
1527 ALOGD("%s: HDR+ in Camera HAL disabled.", __FUNCTION__);
1528 // Disconnect from HDR+ client if HDR+ is not enabled.
1529 if (mHdrPlusClient != nullptr) {
1530 mHdrPlusClient->disconnect();
1531 mHdrPlusClient = nullptr;
1532 }
1533 }
1534
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 // Check state
1536 switch (mState) {
1537 case INITIALIZED:
1538 case CONFIGURED:
1539 case STARTED:
1540 /* valid state */
1541 break;
1542 default:
1543 LOGE("Invalid state %d", mState);
1544 pthread_mutex_unlock(&mMutex);
1545 return -ENODEV;
1546 }
1547
1548 /* Check whether we have video stream */
1549 m_bIs4KVideo = false;
1550 m_bIsVideo = false;
1551 m_bEisSupportedSize = false;
1552 m_bTnrEnabled = false;
1553 bool isZsl = false;
1554 uint32_t videoWidth = 0U;
1555 uint32_t videoHeight = 0U;
1556 size_t rawStreamCnt = 0;
1557 size_t stallStreamCnt = 0;
1558 size_t processedStreamCnt = 0;
1559 // Number of streams on ISP encoder path
1560 size_t numStreamsOnEncoder = 0;
1561 size_t numYuv888OnEncoder = 0;
1562 bool bYuv888OverrideJpeg = false;
1563 cam_dimension_t largeYuv888Size = {0, 0};
1564 cam_dimension_t maxViewfinderSize = {0, 0};
1565 bool bJpegExceeds4K = false;
1566 bool bJpegOnEncoder = false;
1567 bool bUseCommonFeatureMask = false;
1568 cam_feature_mask_t commonFeatureMask = 0;
1569 bool bSmallJpegSize = false;
1570 uint32_t width_ratio;
1571 uint32_t height_ratio;
1572 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1573 camera3_stream_t *inputStream = NULL;
1574 bool isJpeg = false;
1575 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001576 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001577
1578 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1579
1580 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001581 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001582 uint8_t eis_prop_set;
1583 uint32_t maxEisWidth = 0;
1584 uint32_t maxEisHeight = 0;
1585
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001586 // Initialize all instant AEC related variables
1587 mInstantAEC = false;
1588 mResetInstantAEC = false;
1589 mInstantAECSettledFrameNumber = 0;
1590 mAecSkipDisplayFrameBound = 0;
1591 mInstantAecFrameIdxCount = 0;
1592
Thierry Strudel3d639192016-09-09 11:52:26 -07001593 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1594
1595 size_t count = IS_TYPE_MAX;
1596 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1597 for (size_t i = 0; i < count; i++) {
1598 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001599 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1600 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001601 break;
1602 }
1603 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001604 count = CAM_OPT_STAB_MAX;
1605 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1606 for (size_t i = 0; i < count; i++) {
1607 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1608 oisSupported = true;
1609 break;
1610 }
1611 }
1612
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001613 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001614 maxEisWidth = MAX_EIS_WIDTH;
1615 maxEisHeight = MAX_EIS_HEIGHT;
1616 }
1617
1618 /* EIS setprop control */
1619 char eis_prop[PROPERTY_VALUE_MAX];
1620 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001621 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001622 eis_prop_set = (uint8_t)atoi(eis_prop);
1623
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001624 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001625 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1626
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001627 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1628 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1629
Thierry Strudel3d639192016-09-09 11:52:26 -07001630 /* stream configurations */
1631 for (size_t i = 0; i < streamList->num_streams; i++) {
1632 camera3_stream_t *newStream = streamList->streams[i];
1633 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1634 "height = %d, rotation = %d, usage = 0x%x",
1635 i, newStream->stream_type, newStream->format,
1636 newStream->width, newStream->height, newStream->rotation,
1637 newStream->usage);
1638 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1639 newStream->stream_type == CAMERA3_STREAM_INPUT){
1640 isZsl = true;
1641 }
1642 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1643 inputStream = newStream;
1644 }
1645
1646 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1647 isJpeg = true;
1648 jpegSize.width = newStream->width;
1649 jpegSize.height = newStream->height;
1650 if (newStream->width > VIDEO_4K_WIDTH ||
1651 newStream->height > VIDEO_4K_HEIGHT)
1652 bJpegExceeds4K = true;
1653 }
1654
1655 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1656 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1657 m_bIsVideo = true;
1658 videoWidth = newStream->width;
1659 videoHeight = newStream->height;
1660 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1661 (VIDEO_4K_HEIGHT <= newStream->height)) {
1662 m_bIs4KVideo = true;
1663 }
1664 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1665 (newStream->height <= maxEisHeight);
1666 }
1667 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1668 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1669 switch (newStream->format) {
1670 case HAL_PIXEL_FORMAT_BLOB:
1671 stallStreamCnt++;
1672 if (isOnEncoder(maxViewfinderSize, newStream->width,
1673 newStream->height)) {
1674 numStreamsOnEncoder++;
1675 bJpegOnEncoder = true;
1676 }
1677 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1678 newStream->width);
1679 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1680 newStream->height);;
1681 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1682 "FATAL: max_downscale_factor cannot be zero and so assert");
1683 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1684 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1685 LOGH("Setting small jpeg size flag to true");
1686 bSmallJpegSize = true;
1687 }
1688 break;
1689 case HAL_PIXEL_FORMAT_RAW10:
1690 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1691 case HAL_PIXEL_FORMAT_RAW16:
1692 rawStreamCnt++;
1693 break;
1694 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1695 processedStreamCnt++;
1696 if (isOnEncoder(maxViewfinderSize, newStream->width,
1697 newStream->height)) {
1698 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1699 !IS_USAGE_ZSL(newStream->usage)) {
1700 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1701 }
1702 numStreamsOnEncoder++;
1703 }
1704 break;
1705 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1706 processedStreamCnt++;
1707 if (isOnEncoder(maxViewfinderSize, newStream->width,
1708 newStream->height)) {
1709 // If Yuv888 size is not greater than 4K, set feature mask
1710 // to SUPERSET so that it support concurrent request on
1711 // YUV and JPEG.
1712 if (newStream->width <= VIDEO_4K_WIDTH &&
1713 newStream->height <= VIDEO_4K_HEIGHT) {
1714 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1715 }
1716 numStreamsOnEncoder++;
1717 numYuv888OnEncoder++;
1718 largeYuv888Size.width = newStream->width;
1719 largeYuv888Size.height = newStream->height;
1720 }
1721 break;
1722 default:
1723 processedStreamCnt++;
1724 if (isOnEncoder(maxViewfinderSize, newStream->width,
1725 newStream->height)) {
1726 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1727 numStreamsOnEncoder++;
1728 }
1729 break;
1730 }
1731
1732 }
1733 }
1734
1735 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1736 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1737 !m_bIsVideo) {
1738 m_bEisEnable = false;
1739 }
1740
1741 /* Logic to enable/disable TNR based on specific config size/etc.*/
1742 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1743 ((videoWidth == 1920 && videoHeight == 1080) ||
1744 (videoWidth == 1280 && videoHeight == 720)) &&
1745 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1746 m_bTnrEnabled = true;
1747
1748 /* Check if num_streams is sane */
1749 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1750 rawStreamCnt > MAX_RAW_STREAMS ||
1751 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1752 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1753 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1754 pthread_mutex_unlock(&mMutex);
1755 return -EINVAL;
1756 }
1757 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001758 if (isZsl && m_bIs4KVideo) {
1759 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001760 pthread_mutex_unlock(&mMutex);
1761 return -EINVAL;
1762 }
1763 /* Check if stream sizes are sane */
1764 if (numStreamsOnEncoder > 2) {
1765 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1766 pthread_mutex_unlock(&mMutex);
1767 return -EINVAL;
1768 } else if (1 < numStreamsOnEncoder){
1769 bUseCommonFeatureMask = true;
1770 LOGH("Multiple streams above max viewfinder size, common mask needed");
1771 }
1772
1773 /* Check if BLOB size is greater than 4k in 4k recording case */
1774 if (m_bIs4KVideo && bJpegExceeds4K) {
1775 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1776 pthread_mutex_unlock(&mMutex);
1777 return -EINVAL;
1778 }
1779
1780 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1781 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1782 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1783 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1784 // configurations:
1785 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1786 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1787 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1788 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1789 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1790 __func__);
1791 pthread_mutex_unlock(&mMutex);
1792 return -EINVAL;
1793 }
1794
1795 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1796 // the YUV stream's size is greater or equal to the JPEG size, set common
1797 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1798 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1799 jpegSize.width, jpegSize.height) &&
1800 largeYuv888Size.width > jpegSize.width &&
1801 largeYuv888Size.height > jpegSize.height) {
1802 bYuv888OverrideJpeg = true;
1803 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1804 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1805 }
1806
1807 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1808 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1809 commonFeatureMask);
1810 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1811 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1812
1813 rc = validateStreamDimensions(streamList);
1814 if (rc == NO_ERROR) {
1815 rc = validateStreamRotations(streamList);
1816 }
1817 if (rc != NO_ERROR) {
1818 LOGE("Invalid stream configuration requested!");
1819 pthread_mutex_unlock(&mMutex);
1820 return rc;
1821 }
1822
1823 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1824 for (size_t i = 0; i < streamList->num_streams; i++) {
1825 camera3_stream_t *newStream = streamList->streams[i];
1826 LOGH("newStream type = %d, stream format = %d "
1827 "stream size : %d x %d, stream rotation = %d",
1828 newStream->stream_type, newStream->format,
1829 newStream->width, newStream->height, newStream->rotation);
1830 //if the stream is in the mStreamList validate it
1831 bool stream_exists = false;
1832 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1833 it != mStreamInfo.end(); it++) {
1834 if ((*it)->stream == newStream) {
1835 QCamera3ProcessingChannel *channel =
1836 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1837 stream_exists = true;
1838 if (channel)
1839 delete channel;
1840 (*it)->status = VALID;
1841 (*it)->stream->priv = NULL;
1842 (*it)->channel = NULL;
1843 }
1844 }
1845 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1846 //new stream
1847 stream_info_t* stream_info;
1848 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1849 if (!stream_info) {
1850 LOGE("Could not allocate stream info");
1851 rc = -ENOMEM;
1852 pthread_mutex_unlock(&mMutex);
1853 return rc;
1854 }
1855 stream_info->stream = newStream;
1856 stream_info->status = VALID;
1857 stream_info->channel = NULL;
1858 mStreamInfo.push_back(stream_info);
1859 }
1860 /* Covers Opaque ZSL and API1 F/W ZSL */
1861 if (IS_USAGE_ZSL(newStream->usage)
1862 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1863 if (zslStream != NULL) {
1864 LOGE("Multiple input/reprocess streams requested!");
1865 pthread_mutex_unlock(&mMutex);
1866 return BAD_VALUE;
1867 }
1868 zslStream = newStream;
1869 }
1870 /* Covers YUV reprocess */
1871 if (inputStream != NULL) {
1872 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1873 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1874 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1875 && inputStream->width == newStream->width
1876 && inputStream->height == newStream->height) {
1877 if (zslStream != NULL) {
1878 /* This scenario indicates multiple YUV streams with same size
1879 * as input stream have been requested, since zsl stream handle
1880 * is solely use for the purpose of overriding the size of streams
1881 * which share h/w streams we will just make a guess here as to
1882 * which of the stream is a ZSL stream, this will be refactored
1883 * once we make generic logic for streams sharing encoder output
1884 */
1885 LOGH("Warning, Multiple ip/reprocess streams requested!");
1886 }
1887 zslStream = newStream;
1888 }
1889 }
1890 }
1891
1892 /* If a zsl stream is set, we know that we have configured at least one input or
1893 bidirectional stream */
1894 if (NULL != zslStream) {
1895 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1896 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1897 mInputStreamInfo.format = zslStream->format;
1898 mInputStreamInfo.usage = zslStream->usage;
1899 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1900 mInputStreamInfo.dim.width,
1901 mInputStreamInfo.dim.height,
1902 mInputStreamInfo.format, mInputStreamInfo.usage);
1903 }
1904
1905 cleanAndSortStreamInfo();
1906 if (mMetadataChannel) {
1907 delete mMetadataChannel;
1908 mMetadataChannel = NULL;
1909 }
1910 if (mSupportChannel) {
1911 delete mSupportChannel;
1912 mSupportChannel = NULL;
1913 }
1914
1915 if (mAnalysisChannel) {
1916 delete mAnalysisChannel;
1917 mAnalysisChannel = NULL;
1918 }
1919
1920 if (mDummyBatchChannel) {
1921 delete mDummyBatchChannel;
1922 mDummyBatchChannel = NULL;
1923 }
1924
1925 //Create metadata channel and initialize it
1926 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1927 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1928 gCamCapability[mCameraId]->color_arrangement);
1929 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1930 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001931 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001932 if (mMetadataChannel == NULL) {
1933 LOGE("failed to allocate metadata channel");
1934 rc = -ENOMEM;
1935 pthread_mutex_unlock(&mMutex);
1936 return rc;
1937 }
1938 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1939 if (rc < 0) {
1940 LOGE("metadata channel initialization failed");
1941 delete mMetadataChannel;
1942 mMetadataChannel = NULL;
1943 pthread_mutex_unlock(&mMutex);
1944 return rc;
1945 }
1946
Thierry Strudel3d639192016-09-09 11:52:26 -07001947 bool isRawStreamRequested = false;
1948 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1949 /* Allocate channel objects for the requested streams */
1950 for (size_t i = 0; i < streamList->num_streams; i++) {
1951 camera3_stream_t *newStream = streamList->streams[i];
1952 uint32_t stream_usage = newStream->usage;
1953 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1954 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1955 struct camera_info *p_info = NULL;
1956 pthread_mutex_lock(&gCamLock);
1957 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1958 pthread_mutex_unlock(&gCamLock);
1959 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1960 || IS_USAGE_ZSL(newStream->usage)) &&
1961 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1962 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1963 if (bUseCommonFeatureMask) {
1964 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1965 commonFeatureMask;
1966 } else {
1967 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1968 CAM_QCOM_FEATURE_NONE;
1969 }
1970
1971 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1972 LOGH("Input stream configured, reprocess config");
1973 } else {
1974 //for non zsl streams find out the format
1975 switch (newStream->format) {
1976 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1977 {
1978 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1979 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1980 /* add additional features to pp feature mask */
1981 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1982 mStreamConfigInfo.num_streams);
1983
1984 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1985 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1986 CAM_STREAM_TYPE_VIDEO;
1987 if (m_bTnrEnabled && m_bTnrVideo) {
1988 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1989 CAM_QCOM_FEATURE_CPP_TNR;
1990 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1991 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1992 ~CAM_QCOM_FEATURE_CDS;
1993 }
1994 } else {
1995 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1996 CAM_STREAM_TYPE_PREVIEW;
1997 if (m_bTnrEnabled && m_bTnrPreview) {
1998 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1999 CAM_QCOM_FEATURE_CPP_TNR;
2000 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2001 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2002 ~CAM_QCOM_FEATURE_CDS;
2003 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002004 if(!m_bSwTnrPreview) {
2005 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2006 ~CAM_QTI_FEATURE_SW_TNR;
2007 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 padding_info.width_padding = mSurfaceStridePadding;
2009 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002010 previewSize.width = (int32_t)newStream->width;
2011 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 }
2013 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2014 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2015 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2016 newStream->height;
2017 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2018 newStream->width;
2019 }
2020 }
2021 break;
2022 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2023 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2024 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2025 if (bUseCommonFeatureMask)
2026 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2027 commonFeatureMask;
2028 else
2029 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2030 CAM_QCOM_FEATURE_NONE;
2031 } else {
2032 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2033 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2034 }
2035 break;
2036 case HAL_PIXEL_FORMAT_BLOB:
2037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2038 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2039 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2040 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2041 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2042 } else {
2043 if (bUseCommonFeatureMask &&
2044 isOnEncoder(maxViewfinderSize, newStream->width,
2045 newStream->height)) {
2046 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2047 } else {
2048 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2049 }
2050 }
2051 if (isZsl) {
2052 if (zslStream) {
2053 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2054 (int32_t)zslStream->width;
2055 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2056 (int32_t)zslStream->height;
2057 } else {
2058 LOGE("Error, No ZSL stream identified");
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062 } else if (m_bIs4KVideo) {
2063 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2064 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2065 } else if (bYuv888OverrideJpeg) {
2066 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2067 (int32_t)largeYuv888Size.width;
2068 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2069 (int32_t)largeYuv888Size.height;
2070 }
2071 break;
2072 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2073 case HAL_PIXEL_FORMAT_RAW16:
2074 case HAL_PIXEL_FORMAT_RAW10:
2075 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2076 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2077 isRawStreamRequested = true;
2078 break;
2079 default:
2080 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2081 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2082 break;
2083 }
2084 }
2085
2086 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2087 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2088 gCamCapability[mCameraId]->color_arrangement);
2089
2090 if (newStream->priv == NULL) {
2091 //New stream, construct channel
2092 switch (newStream->stream_type) {
2093 case CAMERA3_STREAM_INPUT:
2094 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2095 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2096 break;
2097 case CAMERA3_STREAM_BIDIRECTIONAL:
2098 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2099 GRALLOC_USAGE_HW_CAMERA_WRITE;
2100 break;
2101 case CAMERA3_STREAM_OUTPUT:
2102 /* For video encoding stream, set read/write rarely
2103 * flag so that they may be set to un-cached */
2104 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2105 newStream->usage |=
2106 (GRALLOC_USAGE_SW_READ_RARELY |
2107 GRALLOC_USAGE_SW_WRITE_RARELY |
2108 GRALLOC_USAGE_HW_CAMERA_WRITE);
2109 else if (IS_USAGE_ZSL(newStream->usage))
2110 {
2111 LOGD("ZSL usage flag skipping");
2112 }
2113 else if (newStream == zslStream
2114 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2115 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2116 } else
2117 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2118 break;
2119 default:
2120 LOGE("Invalid stream_type %d", newStream->stream_type);
2121 break;
2122 }
2123
2124 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2125 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2126 QCamera3ProcessingChannel *channel = NULL;
2127 switch (newStream->format) {
2128 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2129 if ((newStream->usage &
2130 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2131 (streamList->operation_mode ==
2132 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2133 ) {
2134 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2135 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002136 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002137 this,
2138 newStream,
2139 (cam_stream_type_t)
2140 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2141 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2142 mMetadataChannel,
2143 0); //heap buffers are not required for HFR video channel
2144 if (channel == NULL) {
2145 LOGE("allocation of channel failed");
2146 pthread_mutex_unlock(&mMutex);
2147 return -ENOMEM;
2148 }
2149 //channel->getNumBuffers() will return 0 here so use
2150 //MAX_INFLIGH_HFR_REQUESTS
2151 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2152 newStream->priv = channel;
2153 LOGI("num video buffers in HFR mode: %d",
2154 MAX_INFLIGHT_HFR_REQUESTS);
2155 } else {
2156 /* Copy stream contents in HFR preview only case to create
2157 * dummy batch channel so that sensor streaming is in
2158 * HFR mode */
2159 if (!m_bIsVideo && (streamList->operation_mode ==
2160 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2161 mDummyBatchStream = *newStream;
2162 }
2163 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2164 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002165 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002166 this,
2167 newStream,
2168 (cam_stream_type_t)
2169 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2170 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2171 mMetadataChannel,
2172 MAX_INFLIGHT_REQUESTS);
2173 if (channel == NULL) {
2174 LOGE("allocation of channel failed");
2175 pthread_mutex_unlock(&mMutex);
2176 return -ENOMEM;
2177 }
2178 newStream->max_buffers = channel->getNumBuffers();
2179 newStream->priv = channel;
2180 }
2181 break;
2182 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2183 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2184 mChannelHandle,
2185 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002186 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002187 this,
2188 newStream,
2189 (cam_stream_type_t)
2190 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2191 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2192 mMetadataChannel);
2193 if (channel == NULL) {
2194 LOGE("allocation of YUV channel failed");
2195 pthread_mutex_unlock(&mMutex);
2196 return -ENOMEM;
2197 }
2198 newStream->max_buffers = channel->getNumBuffers();
2199 newStream->priv = channel;
2200 break;
2201 }
2202 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2203 case HAL_PIXEL_FORMAT_RAW16:
2204 case HAL_PIXEL_FORMAT_RAW10:
2205 mRawChannel = new QCamera3RawChannel(
2206 mCameraHandle->camera_handle, mChannelHandle,
2207 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002208 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002209 this, newStream,
2210 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2211 mMetadataChannel,
2212 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2213 if (mRawChannel == NULL) {
2214 LOGE("allocation of raw channel failed");
2215 pthread_mutex_unlock(&mMutex);
2216 return -ENOMEM;
2217 }
2218 newStream->max_buffers = mRawChannel->getNumBuffers();
2219 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2220 break;
2221 case HAL_PIXEL_FORMAT_BLOB:
2222 // Max live snapshot inflight buffer is 1. This is to mitigate
2223 // frame drop issues for video snapshot. The more buffers being
2224 // allocated, the more frame drops there are.
2225 mPictureChannel = new QCamera3PicChannel(
2226 mCameraHandle->camera_handle, mChannelHandle,
2227 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002228 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2230 m_bIs4KVideo, isZsl, mMetadataChannel,
2231 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2232 if (mPictureChannel == NULL) {
2233 LOGE("allocation of channel failed");
2234 pthread_mutex_unlock(&mMutex);
2235 return -ENOMEM;
2236 }
2237 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2238 newStream->max_buffers = mPictureChannel->getNumBuffers();
2239 mPictureChannel->overrideYuvSize(
2240 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2241 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2242 break;
2243
2244 default:
2245 LOGE("not a supported format 0x%x", newStream->format);
2246 break;
2247 }
2248 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2249 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2250 } else {
2251 LOGE("Error, Unknown stream type");
2252 pthread_mutex_unlock(&mMutex);
2253 return -EINVAL;
2254 }
2255
2256 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2257 if (channel != NULL && channel->isUBWCEnabled()) {
2258 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002259 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2260 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002261 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2262 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2263 }
2264 }
2265
2266 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2267 it != mStreamInfo.end(); it++) {
2268 if ((*it)->stream == newStream) {
2269 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2270 break;
2271 }
2272 }
2273 } else {
2274 // Channel already exists for this stream
2275 // Do nothing for now
2276 }
2277 padding_info = gCamCapability[mCameraId]->padding_info;
2278
2279 /* Do not add entries for input stream in metastream info
2280 * since there is no real stream associated with it
2281 */
2282 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2283 mStreamConfigInfo.num_streams++;
2284 }
2285
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002286 // Create analysis stream all the time, even when h/w support is not available
2287 {
2288 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2289 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2290 gCamCapability[mCameraId]->color_arrangement);
2291 cam_analysis_info_t analysisInfo;
2292 int32_t ret = NO_ERROR;
2293 ret = mCommon.getAnalysisInfo(
2294 FALSE,
2295 analysisFeatureMask,
2296 &analysisInfo);
2297 if (ret == NO_ERROR) {
2298 cam_dimension_t analysisDim;
2299 analysisDim = mCommon.getMatchingDimension(previewSize,
2300 analysisInfo.analysis_recommended_res);
2301
2302 mAnalysisChannel = new QCamera3SupportChannel(
2303 mCameraHandle->camera_handle,
2304 mChannelHandle,
2305 mCameraHandle->ops,
2306 &analysisInfo.analysis_padding_info,
2307 analysisFeatureMask,
2308 CAM_STREAM_TYPE_ANALYSIS,
2309 &analysisDim,
2310 (analysisInfo.analysis_format
2311 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2312 : CAM_FORMAT_YUV_420_NV21),
2313 analysisInfo.hw_analysis_supported,
2314 gCamCapability[mCameraId]->color_arrangement,
2315 this,
2316 0); // force buffer count to 0
2317 } else {
2318 LOGW("getAnalysisInfo failed, ret = %d", ret);
2319 }
2320 if (!mAnalysisChannel) {
2321 LOGW("Analysis channel cannot be created");
2322 }
2323 }
2324
Thierry Strudel3d639192016-09-09 11:52:26 -07002325 //RAW DUMP channel
2326 if (mEnableRawDump && isRawStreamRequested == false){
2327 cam_dimension_t rawDumpSize;
2328 rawDumpSize = getMaxRawSize(mCameraId);
2329 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2330 setPAAFSupport(rawDumpFeatureMask,
2331 CAM_STREAM_TYPE_RAW,
2332 gCamCapability[mCameraId]->color_arrangement);
2333 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2334 mChannelHandle,
2335 mCameraHandle->ops,
2336 rawDumpSize,
2337 &padding_info,
2338 this, rawDumpFeatureMask);
2339 if (!mRawDumpChannel) {
2340 LOGE("Raw Dump channel cannot be created");
2341 pthread_mutex_unlock(&mMutex);
2342 return -ENOMEM;
2343 }
2344 }
2345
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002346 // Initialize HDR+ Raw Source channel.
2347 if (mHdrPlusClient != nullptr) {
2348 if (isRawStreamRequested || mRawDumpChannel) {
2349 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported.",
2350 __FUNCTION__);
2351 mHdrPlusClient->disconnect();
2352 mHdrPlusClient = nullptr;
2353 } else {
2354 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2355 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2356 setPAAFSupport(hdrPlusRawFeatureMask,
2357 CAM_STREAM_TYPE_RAW,
2358 gCamCapability[mCameraId]->color_arrangement);
2359 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2360 mChannelHandle,
2361 mCameraHandle->ops,
2362 rawSize,
2363 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002364 this, hdrPlusRawFeatureMask,
2365 mHdrPlusClient,
2366 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002367 if (!mHdrPlusRawSrcChannel) {
2368 LOGE("HDR+ Raw Source channel cannot be created");
2369 pthread_mutex_unlock(&mMutex);
2370 return -ENOMEM;
2371 }
2372 }
2373 }
2374
Thierry Strudel3d639192016-09-09 11:52:26 -07002375
2376 if (mAnalysisChannel) {
2377 cam_analysis_info_t analysisInfo;
2378 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2379 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2380 CAM_STREAM_TYPE_ANALYSIS;
2381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2382 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2383 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2384 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2385 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002386 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2388 &analysisInfo);
2389 if (rc != NO_ERROR) {
2390 LOGE("getAnalysisInfo failed, ret = %d", rc);
2391 pthread_mutex_unlock(&mMutex);
2392 return rc;
2393 }
2394 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002395 mCommon.getMatchingDimension(previewSize,
2396 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002397 mStreamConfigInfo.num_streams++;
2398 }
2399
2400 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2401 cam_analysis_info_t supportInfo;
2402 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2403 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2404 setPAAFSupport(callbackFeatureMask,
2405 CAM_STREAM_TYPE_CALLBACK,
2406 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002407 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002408 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002409 if (ret != NO_ERROR) {
2410 /* Ignore the error for Mono camera
2411 * because the PAAF bit mask is only set
2412 * for CAM_STREAM_TYPE_ANALYSIS stream type
2413 */
2414 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2415 LOGW("getAnalysisInfo failed, ret = %d", ret);
2416 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002417 }
2418 mSupportChannel = new QCamera3SupportChannel(
2419 mCameraHandle->camera_handle,
2420 mChannelHandle,
2421 mCameraHandle->ops,
2422 &gCamCapability[mCameraId]->padding_info,
2423 callbackFeatureMask,
2424 CAM_STREAM_TYPE_CALLBACK,
2425 &QCamera3SupportChannel::kDim,
2426 CAM_FORMAT_YUV_420_NV21,
2427 supportInfo.hw_analysis_supported,
2428 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002429 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 if (!mSupportChannel) {
2431 LOGE("dummy channel cannot be created");
2432 pthread_mutex_unlock(&mMutex);
2433 return -ENOMEM;
2434 }
2435 }
2436
2437 if (mSupportChannel) {
2438 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2439 QCamera3SupportChannel::kDim;
2440 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2441 CAM_STREAM_TYPE_CALLBACK;
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2443 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2444 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2445 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2446 gCamCapability[mCameraId]->color_arrangement);
2447 mStreamConfigInfo.num_streams++;
2448 }
2449
2450 if (mRawDumpChannel) {
2451 cam_dimension_t rawSize;
2452 rawSize = getMaxRawSize(mCameraId);
2453 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2454 rawSize;
2455 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2456 CAM_STREAM_TYPE_RAW;
2457 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2458 CAM_QCOM_FEATURE_NONE;
2459 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2460 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2461 gCamCapability[mCameraId]->color_arrangement);
2462 mStreamConfigInfo.num_streams++;
2463 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002464
2465 if (mHdrPlusRawSrcChannel) {
2466 cam_dimension_t rawSize;
2467 rawSize = getMaxRawSize(mCameraId);
2468 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2469 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2470 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2471 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2472 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2473 gCamCapability[mCameraId]->color_arrangement);
2474 mStreamConfigInfo.num_streams++;
2475 }
2476
Thierry Strudel3d639192016-09-09 11:52:26 -07002477 /* In HFR mode, if video stream is not added, create a dummy channel so that
2478 * ISP can create a batch mode even for preview only case. This channel is
2479 * never 'start'ed (no stream-on), it is only 'initialized' */
2480 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2481 !m_bIsVideo) {
2482 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2483 setPAAFSupport(dummyFeatureMask,
2484 CAM_STREAM_TYPE_VIDEO,
2485 gCamCapability[mCameraId]->color_arrangement);
2486 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2487 mChannelHandle,
2488 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002489 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002490 this,
2491 &mDummyBatchStream,
2492 CAM_STREAM_TYPE_VIDEO,
2493 dummyFeatureMask,
2494 mMetadataChannel);
2495 if (NULL == mDummyBatchChannel) {
2496 LOGE("creation of mDummyBatchChannel failed."
2497 "Preview will use non-hfr sensor mode ");
2498 }
2499 }
2500 if (mDummyBatchChannel) {
2501 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2502 mDummyBatchStream.width;
2503 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2504 mDummyBatchStream.height;
2505 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2506 CAM_STREAM_TYPE_VIDEO;
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2508 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2509 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2511 gCamCapability[mCameraId]->color_arrangement);
2512 mStreamConfigInfo.num_streams++;
2513 }
2514
2515 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2516 mStreamConfigInfo.buffer_info.max_buffers =
2517 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2518
2519 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2520 for (pendingRequestIterator i = mPendingRequestsList.begin();
2521 i != mPendingRequestsList.end();) {
2522 i = erasePendingRequest(i);
2523 }
2524 mPendingFrameDropList.clear();
2525 // Initialize/Reset the pending buffers list
2526 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2527 req.mPendingBufferList.clear();
2528 }
2529 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2530
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 mCurJpegMeta.clear();
2532 //Get min frame duration for this streams configuration
2533 deriveMinFrameDuration();
2534
2535 // Update state
2536 mState = CONFIGURED;
2537
2538 pthread_mutex_unlock(&mMutex);
2539
2540 return rc;
2541}
2542
2543/*===========================================================================
2544 * FUNCTION : validateCaptureRequest
2545 *
2546 * DESCRIPTION: validate a capture request from camera service
2547 *
2548 * PARAMETERS :
2549 * @request : request from framework to process
2550 *
2551 * RETURN :
2552 *
2553 *==========================================================================*/
2554int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002555 camera3_capture_request_t *request,
2556 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002557{
2558 ssize_t idx = 0;
2559 const camera3_stream_buffer_t *b;
2560 CameraMetadata meta;
2561
2562 /* Sanity check the request */
2563 if (request == NULL) {
2564 LOGE("NULL capture request");
2565 return BAD_VALUE;
2566 }
2567
2568 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2569 /*settings cannot be null for the first request*/
2570 return BAD_VALUE;
2571 }
2572
2573 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002574 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2575 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002576 LOGE("Request %d: No output buffers provided!",
2577 __FUNCTION__, frameNumber);
2578 return BAD_VALUE;
2579 }
2580 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2581 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2582 request->num_output_buffers, MAX_NUM_STREAMS);
2583 return BAD_VALUE;
2584 }
2585 if (request->input_buffer != NULL) {
2586 b = request->input_buffer;
2587 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2588 LOGE("Request %d: Buffer %ld: Status not OK!",
2589 frameNumber, (long)idx);
2590 return BAD_VALUE;
2591 }
2592 if (b->release_fence != -1) {
2593 LOGE("Request %d: Buffer %ld: Has a release fence!",
2594 frameNumber, (long)idx);
2595 return BAD_VALUE;
2596 }
2597 if (b->buffer == NULL) {
2598 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2599 frameNumber, (long)idx);
2600 return BAD_VALUE;
2601 }
2602 }
2603
2604 // Validate all buffers
2605 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002606 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002607 QCamera3ProcessingChannel *channel =
2608 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2609 if (channel == NULL) {
2610 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2611 frameNumber, (long)idx);
2612 return BAD_VALUE;
2613 }
2614 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2615 LOGE("Request %d: Buffer %ld: Status not OK!",
2616 frameNumber, (long)idx);
2617 return BAD_VALUE;
2618 }
2619 if (b->release_fence != -1) {
2620 LOGE("Request %d: Buffer %ld: Has a release fence!",
2621 frameNumber, (long)idx);
2622 return BAD_VALUE;
2623 }
2624 if (b->buffer == NULL) {
2625 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2626 frameNumber, (long)idx);
2627 return BAD_VALUE;
2628 }
2629 if (*(b->buffer) == NULL) {
2630 LOGE("Request %d: Buffer %ld: NULL private handle!",
2631 frameNumber, (long)idx);
2632 return BAD_VALUE;
2633 }
2634 idx++;
2635 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002636 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002637 return NO_ERROR;
2638}
2639
2640/*===========================================================================
2641 * FUNCTION : deriveMinFrameDuration
2642 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2644 * on currently configured streams.
2645 *
2646 * PARAMETERS : NONE
2647 *
2648 * RETURN : NONE
2649 *
2650 *==========================================================================*/
2651void QCamera3HardwareInterface::deriveMinFrameDuration()
2652{
2653 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2654
2655 maxJpegDim = 0;
2656 maxProcessedDim = 0;
2657 maxRawDim = 0;
2658
2659 // Figure out maximum jpeg, processed, and raw dimensions
2660 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2661 it != mStreamInfo.end(); it++) {
2662
2663 // Input stream doesn't have valid stream_type
2664 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2665 continue;
2666
2667 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2668 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2669 if (dimension > maxJpegDim)
2670 maxJpegDim = dimension;
2671 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2672 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2673 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2674 if (dimension > maxRawDim)
2675 maxRawDim = dimension;
2676 } else {
2677 if (dimension > maxProcessedDim)
2678 maxProcessedDim = dimension;
2679 }
2680 }
2681
2682 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2683 MAX_SIZES_CNT);
2684
2685 //Assume all jpeg dimensions are in processed dimensions.
2686 if (maxJpegDim > maxProcessedDim)
2687 maxProcessedDim = maxJpegDim;
2688 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2689 if (maxProcessedDim > maxRawDim) {
2690 maxRawDim = INT32_MAX;
2691
2692 for (size_t i = 0; i < count; i++) {
2693 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2694 gCamCapability[mCameraId]->raw_dim[i].height;
2695 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2696 maxRawDim = dimension;
2697 }
2698 }
2699
2700 //Find minimum durations for processed, jpeg, and raw
2701 for (size_t i = 0; i < count; i++) {
2702 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2703 gCamCapability[mCameraId]->raw_dim[i].height) {
2704 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2705 break;
2706 }
2707 }
2708 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2709 for (size_t i = 0; i < count; i++) {
2710 if (maxProcessedDim ==
2711 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2712 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2713 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2714 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2715 break;
2716 }
2717 }
2718}
2719
2720/*===========================================================================
2721 * FUNCTION : getMinFrameDuration
2722 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2729 *
2730 *==========================================================================*/
2731int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2732{
2733 bool hasJpegStream = false;
2734 bool hasRawStream = false;
2735 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2736 const camera3_stream_t *stream = request->output_buffers[i].stream;
2737 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2738 hasJpegStream = true;
2739 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2740 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2741 stream->format == HAL_PIXEL_FORMAT_RAW16)
2742 hasRawStream = true;
2743 }
2744
2745 if (!hasJpegStream)
2746 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2747 else
2748 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2749}
2750
2751/*===========================================================================
2752 * FUNCTION : handleBuffersDuringFlushLock
2753 *
2754 * DESCRIPTION: Account for buffers returned from back-end during flush
2755 * This function is executed while mMutex is held by the caller.
2756 *
2757 * PARAMETERS :
2758 * @buffer: image buffer for the callback
2759 *
2760 * RETURN :
2761 *==========================================================================*/
2762void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2763{
2764 bool buffer_found = false;
2765 for (List<PendingBuffersInRequest>::iterator req =
2766 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2767 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2768 for (List<PendingBufferInfo>::iterator i =
2769 req->mPendingBufferList.begin();
2770 i != req->mPendingBufferList.end(); i++) {
2771 if (i->buffer == buffer->buffer) {
2772 mPendingBuffersMap.numPendingBufsAtFlush--;
2773 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2774 buffer->buffer, req->frame_number,
2775 mPendingBuffersMap.numPendingBufsAtFlush);
2776 buffer_found = true;
2777 break;
2778 }
2779 }
2780 if (buffer_found) {
2781 break;
2782 }
2783 }
2784 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2785 //signal the flush()
2786 LOGD("All buffers returned to HAL. Continue flush");
2787 pthread_cond_signal(&mBuffersCond);
2788 }
2789}
2790
Thierry Strudel3d639192016-09-09 11:52:26 -07002791/*===========================================================================
2792 * FUNCTION : handleBatchMetadata
2793 *
2794 * DESCRIPTION: Handles metadata buffer callback in batch mode
2795 *
2796 * PARAMETERS : @metadata_buf: metadata buffer
2797 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2798 * the meta buf in this method
2799 *
2800 * RETURN :
2801 *
2802 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. multiple process_capture_requests => 1 set_param =>
     * 1 handleBatchMetadata => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the raw pointers for the batch's (last-frame) identifiers out of
    // the metadata buffer; any NULL means the buffer is unusable.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Note: invalid metadata is still forwarded below (with loopCount==1)
        // so pipeline depth bookkeeping in handleMetadataWithLock runs.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    // mPendingBatchMap maps the last frame number of a batch to its first;
    // it is shared with the request path, so consult it under mMutex.
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The map entry is removed only here (not in the urgent branch):
        // the normal frame number is the final reference to this batch.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // Iterate once per interpolated frame; the larger of the two spans
        // drives the loop, and diffs beyond the batch size indicate a bug.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: back-compute the first frame's capture time
                //from the batch's last timestamp, then step forward at the
                //HFR frame interval (NSEC_PER_SEC / mHFRVideoFps).
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // Forward each interpolated result; the callee never frees the buffer
        // here (free_and_bufdone_meta_buf=false) since it is reused per loop.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2967
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002968void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2969 camera3_error_msg_code_t errorCode)
2970{
2971 camera3_notify_msg_t notify_msg;
2972 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2973 notify_msg.type = CAMERA3_MSG_ERROR;
2974 notify_msg.message.error.error_code = errorCode;
2975 notify_msg.message.error.error_stream = NULL;
2976 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002977 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002978
2979 return;
2980}
Thierry Strudel3d639192016-09-09 11:52:26 -07002981/*===========================================================================
2982 * FUNCTION : handleMetadataWithLock
2983 *
2984 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2985 *
2986 * PARAMETERS : @metadata_buf: metadata buffer
2987 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2988 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002989 * @firstMetadataInBatch: Boolean to indicate whether this is the
2990 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002991 *
2992 * RETURN :
2993 *
2994 *==========================================================================*/
2995void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002996 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2997 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002998{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002999 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003000 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3001 //during flush do not send metadata from this thread
3002 LOGD("not sending metadata during flush or when mState is error");
3003 if (free_and_bufdone_meta_buf) {
3004 mMetadataChannel->bufDone(metadata_buf);
3005 free(metadata_buf);
3006 }
3007 return;
3008 }
3009
3010 //not in flush
3011 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3012 int32_t frame_number_valid, urgent_frame_number_valid;
3013 uint32_t frame_number, urgent_frame_number;
3014 int64_t capture_time;
3015 nsecs_t currentSysTime;
3016
3017 int32_t *p_frame_number_valid =
3018 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3019 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3020 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3021 int32_t *p_urgent_frame_number_valid =
3022 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3023 uint32_t *p_urgent_frame_number =
3024 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3025 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3026 metadata) {
3027 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3028 *p_frame_number_valid, *p_frame_number);
3029 }
3030
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003031 camera_metadata_t *resultMetadata = nullptr;
3032
Thierry Strudel3d639192016-09-09 11:52:26 -07003033 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3034 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3035 LOGE("Invalid metadata");
3036 if (free_and_bufdone_meta_buf) {
3037 mMetadataChannel->bufDone(metadata_buf);
3038 free(metadata_buf);
3039 }
3040 goto done_metadata;
3041 }
3042 frame_number_valid = *p_frame_number_valid;
3043 frame_number = *p_frame_number;
3044 capture_time = *p_capture_time;
3045 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3046 urgent_frame_number = *p_urgent_frame_number;
3047 currentSysTime = systemTime(CLOCK_MONOTONIC);
3048
3049 // Detect if buffers from any requests are overdue
3050 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003051 int64_t timeout;
3052 {
3053 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3054 // If there is a pending HDR+ request, the following requests may be blocked until the
3055 // HDR+ request is done. So allow a longer timeout.
3056 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3057 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3058 }
3059
3060 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003061 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003062 assert(missed.stream->priv);
3063 if (missed.stream->priv) {
3064 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3065 assert(ch->mStreams[0]);
3066 if (ch->mStreams[0]) {
3067 LOGE("Cancel missing frame = %d, buffer = %p,"
3068 "stream type = %d, stream format = %d",
3069 req.frame_number, missed.buffer,
3070 ch->mStreams[0]->getMyType(), missed.stream->format);
3071 ch->timeoutFrame(req.frame_number);
3072 }
3073 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003074 }
3075 }
3076 }
3077 //Partial result on process_capture_result for timestamp
3078 if (urgent_frame_number_valid) {
3079 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3080 urgent_frame_number, capture_time);
3081
3082 //Recieved an urgent Frame Number, handle it
3083 //using partial results
3084 for (pendingRequestIterator i =
3085 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3086 LOGD("Iterator Frame = %d urgent frame = %d",
3087 i->frame_number, urgent_frame_number);
3088
3089 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3090 (i->partial_result_cnt == 0)) {
3091 LOGE("Error: HAL missed urgent metadata for frame number %d",
3092 i->frame_number);
3093 }
3094
3095 if (i->frame_number == urgent_frame_number &&
3096 i->bUrgentReceived == 0) {
3097
3098 camera3_capture_result_t result;
3099 memset(&result, 0, sizeof(camera3_capture_result_t));
3100
3101 i->partial_result_cnt++;
3102 i->bUrgentReceived = 1;
3103 // Extract 3A metadata
3104 result.result =
3105 translateCbUrgentMetadataToResultMetadata(metadata);
3106 // Populate metadata result
3107 result.frame_number = urgent_frame_number;
3108 result.num_output_buffers = 0;
3109 result.output_buffers = NULL;
3110 result.partial_result = i->partial_result_cnt;
3111
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003112 if (mHdrPlusClient != nullptr) {
3113 // Notify HDR+ client about the partial metadata.
3114 mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3115 result.partial_result == PARTIAL_RESULT_COUNT);
3116 }
3117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003119 LOGD("urgent frame_number = %u, capture_time = %lld",
3120 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003121 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3122 // Instant AEC settled for this frame.
3123 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3124 mInstantAECSettledFrameNumber = urgent_frame_number;
3125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003126 free_camera_metadata((camera_metadata_t *)result.result);
3127 break;
3128 }
3129 }
3130 }
3131
3132 if (!frame_number_valid) {
3133 LOGD("Not a valid normal frame number, used as SOF only");
3134 if (free_and_bufdone_meta_buf) {
3135 mMetadataChannel->bufDone(metadata_buf);
3136 free(metadata_buf);
3137 }
3138 goto done_metadata;
3139 }
3140 LOGH("valid frame_number = %u, capture_time = %lld",
3141 frame_number, capture_time);
3142
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003143 // Check whether any stream buffer corresponding to this is dropped or not
3144 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3145 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3146 for (auto & pendingRequest : mPendingRequestsList) {
3147 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3148 mInstantAECSettledFrameNumber)) {
3149 camera3_notify_msg_t notify_msg = {};
3150 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003151 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003152 QCamera3ProcessingChannel *channel =
3153 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003154 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003155 if (p_cam_frame_drop) {
3156 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003157 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003158 // Got the stream ID for drop frame.
3159 dropFrame = true;
3160 break;
3161 }
3162 }
3163 } else {
3164 // This is instant AEC case.
3165 // For instant AEC drop the stream untill AEC is settled.
3166 dropFrame = true;
3167 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003168
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003169 if (dropFrame) {
3170 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3171 if (p_cam_frame_drop) {
3172 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003173 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003174 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003175 } else {
3176 // For instant AEC, inform frame drop and frame number
3177 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3178 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003179 pendingRequest.frame_number, streamID,
3180 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003181 }
3182 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003183 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003184 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003185 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003186 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003187 if (p_cam_frame_drop) {
3188 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003189 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003190 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003191 } else {
3192 // For instant AEC, inform frame drop and frame number
3193 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3194 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003195 pendingRequest.frame_number, streamID,
3196 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003197 }
3198 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003199 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003200 PendingFrameDrop.stream_ID = streamID;
3201 // Add the Frame drop info to mPendingFrameDropList
3202 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003203 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003204 }
3205 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003206 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003207
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003208 for (auto & pendingRequest : mPendingRequestsList) {
3209 // Find the pending request with the frame number.
3210 if (pendingRequest.frame_number == frame_number) {
3211 // Update the sensor timestamp.
3212 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003213
Thierry Strudel3d639192016-09-09 11:52:26 -07003214
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003215 /* Set the timestamp in display metadata so that clients aware of
3216 private_handle such as VT can use this un-modified timestamps.
3217 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003218 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003219
Thierry Strudel3d639192016-09-09 11:52:26 -07003220 // Find channel requiring metadata, meaning internal offline postprocess
3221 // is needed.
3222 //TODO: for now, we don't support two streams requiring metadata at the same time.
3223 // (because we are not making copies, and metadata buffer is not reference counted.
3224 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003225 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3226 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003227 if (iter->need_metadata) {
3228 internalPproc = true;
3229 QCamera3ProcessingChannel *channel =
3230 (QCamera3ProcessingChannel *)iter->stream->priv;
3231 channel->queueReprocMetadata(metadata_buf);
3232 break;
3233 }
3234 }
3235
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003236 for (auto itr = pendingRequest.internalRequestList.begin();
3237 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003238 if (itr->need_metadata) {
3239 internalPproc = true;
3240 QCamera3ProcessingChannel *channel =
3241 (QCamera3ProcessingChannel *)itr->stream->priv;
3242 channel->queueReprocMetadata(metadata_buf);
3243 break;
3244 }
3245 }
3246
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003247 resultMetadata = translateFromHalMetadata(metadata,
3248 pendingRequest.timestamp, pendingRequest.request_id,
3249 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3250 pendingRequest.capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08003251 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003252 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003253 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003254 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003255 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003256
3257 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003258 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003259
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003260 if (pendingRequest.blob_request) {
3261 //Dump tuning metadata if enabled and available
3262 char prop[PROPERTY_VALUE_MAX];
3263 memset(prop, 0, sizeof(prop));
3264 property_get("persist.camera.dumpmetadata", prop, "0");
3265 int32_t enabled = atoi(prop);
3266 if (enabled && metadata->is_tuning_params_valid) {
3267 dumpMetadataToFile(metadata->tuning_params,
3268 mMetaFrameCount,
3269 enabled,
3270 "Snapshot",
3271 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003272 }
3273 }
3274
3275 if (!internalPproc) {
3276 LOGD("couldn't find need_metadata for this metadata");
3277 // Return metadata buffer
3278 if (free_and_bufdone_meta_buf) {
3279 mMetadataChannel->bufDone(metadata_buf);
3280 free(metadata_buf);
3281 }
3282 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003283
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003284 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003285 }
3286 }
3287
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003288 // Try to send out shutter callbacks and capture results.
3289 handlePendingResultsWithLock(frame_number, resultMetadata);
3290 return;
3291
Thierry Strudel3d639192016-09-09 11:52:26 -07003292done_metadata:
3293 for (pendingRequestIterator i = mPendingRequestsList.begin();
3294 i != mPendingRequestsList.end() ;i++) {
3295 i->pipeline_depth++;
3296 }
3297 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3298 unblockRequestIfNecessary();
3299}
3300
3301/*===========================================================================
3302 * FUNCTION : hdrPlusPerfLock
3303 *
3304 * DESCRIPTION: perf lock for HDR+ using custom intent
3305 *
3306 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3307 *
3308 * RETURN : None
3309 *
3310 *==========================================================================*/
3311void QCamera3HardwareInterface::hdrPlusPerfLock(
3312 mm_camera_super_buf_t *metadata_buf)
3313{
3314 if (NULL == metadata_buf) {
3315 LOGE("metadata_buf is NULL");
3316 return;
3317 }
3318 metadata_buffer_t *metadata =
3319 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3320 int32_t *p_frame_number_valid =
3321 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3322 uint32_t *p_frame_number =
3323 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3324
3325 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3326 LOGE("%s: Invalid metadata", __func__);
3327 return;
3328 }
3329
3330 //acquire perf lock for 5 sec after the last HDR frame is captured
3331 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3332 if ((p_frame_number != NULL) &&
3333 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003334 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003335 }
3336 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003337}
3338
3339/*===========================================================================
3340 * FUNCTION : handleInputBufferWithLock
3341 *
3342 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3343 *
3344 * PARAMETERS : @frame_number: frame number of the input buffer
3345 *
3346 * RETURN :
3347 *
3348 *==========================================================================*/
3349void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3350{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003351 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003352 pendingRequestIterator i = mPendingRequestsList.begin();
3353 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3354 i++;
3355 }
3356 if (i != mPendingRequestsList.end() && i->input_buffer) {
3357 //found the right request
3358 if (!i->shutter_notified) {
3359 CameraMetadata settings;
3360 camera3_notify_msg_t notify_msg;
3361 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3362 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3363 if(i->settings) {
3364 settings = i->settings;
3365 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3366 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3367 } else {
3368 LOGE("No timestamp in input settings! Using current one.");
3369 }
3370 } else {
3371 LOGE("Input settings missing!");
3372 }
3373
3374 notify_msg.type = CAMERA3_MSG_SHUTTER;
3375 notify_msg.message.shutter.frame_number = frame_number;
3376 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003377 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003378 i->shutter_notified = true;
3379 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3380 i->frame_number, notify_msg.message.shutter.timestamp);
3381 }
3382
3383 if (i->input_buffer->release_fence != -1) {
3384 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3385 close(i->input_buffer->release_fence);
3386 if (rc != OK) {
3387 LOGE("input buffer sync wait failed %d", rc);
3388 }
3389 }
3390
3391 camera3_capture_result result;
3392 memset(&result, 0, sizeof(camera3_capture_result));
3393 result.frame_number = frame_number;
3394 result.result = i->settings;
3395 result.input_buffer = i->input_buffer;
3396 result.partial_result = PARTIAL_RESULT_COUNT;
3397
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003398 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003399 LOGD("Input request metadata and input buffer frame_number = %u",
3400 i->frame_number);
3401 i = erasePendingRequest(i);
3402 } else {
3403 LOGE("Could not find input request for frame number %d", frame_number);
3404 }
3405}
3406
3407/*===========================================================================
3408 * FUNCTION : handleBufferWithLock
3409 *
3410 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3411 *
3412 * PARAMETERS : @buffer: image buffer for the callback
3413 * @frame_number: frame number of the image buffer
3414 *
3415 * RETURN :
3416 *
3417 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
        camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A returned BLOB (JPEG) buffer ends a snapshot; release the perf lock
    // that was taken for the capture.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        // Flush in progress: buffers are accounted for by the flush path.
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // No pending request: the result metadata for this frame was already
        // sent, so return the buffer to the framework immediately.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                        j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was recorded as dropped, flag the
        // buffer as an error and retire the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded against this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: wait on and close the input buffer's release
            // fence before its output can be considered complete.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
        }

        // Put buffer into the pending request
        for (auto &requestedBuffer : i->buffers) {
            if (requestedBuffer.stream == buffer->stream) {
                if (requestedBuffer.buffer != nullptr) {
                    LOGE("Error: buffer is already set");
                } else {
                    // Cache a heap copy of the stream buffer; it is sent out
                    // with the result metadata later and freed at that point
                    // (see handlePendingResultsWithLock).
                    requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
                        sizeof(camera3_stream_buffer_t));
                    *(requestedBuffer.buffer) = *buffer;
                    LOGH("cache buffer %p at result frame_number %u",
                        buffer->buffer, frame_number);
                }
            }
        }

        if (i->input_buffer) {
            // For a reprocessing request, try to send out shutter callback and result metadata.
            handlePendingResultsWithLock(frame_number, nullptr);
        }
    }

    // First preview buffer out: drop the startup perf locks and switch to the
    // steady-state power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3522
/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Records the result metadata for the given frame, then walks
 *              mPendingRequestsList in order, sending out shutter callbacks
 *              and capture results for every request that is ready (i.e. all
 *              earlier requests already have result metadata). Called with
 *              mMutex held.
 *
 * PARAMETERS : @frameNumber   : frame number whose result metadata arrived
 *              @resultMetadata: translated result metadata for frameNumber;
 *                               nullptr for reprocess requests (their result
 *                               is taken from the request settings instead)
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
        const camera_metadata_t *resultMetadata)
{
    // Find the pending request for this result metadata.
    auto requestIter = mPendingRequestsList.begin();
    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
        requestIter++;
    }

    if (requestIter == mPendingRequestsList.end()) {
        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    // Update the result metadata
    requestIter->resultMetadata = resultMetadata;

    // Check what type of request this is.
    bool liveRequest = false;
    if (requestIter->hdrplus) {
        // HDR+ request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else if (requestIter->input_buffer != nullptr) {
        // Reprocessing request result is the same as settings.
        requestIter->resultMetadata = requestIter->settings;
        // Reprocessing request doesn't have partial results.
        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
    } else {
        liveRequest = true;
        requestIter->partial_result_cnt++;
        mPendingLiveRequest--;

        // For a live request, send the metadata to HDR+ client.
        // NOTE(review): *resultMetadata is dereferenced here, so callers must
        // never pass nullptr for a live request (currently only the reprocess
        // path passes nullptr) — confirm this invariant holds for all callers.
        if (mHdrPlusClient != nullptr) {
            mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
                    requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
        }
    }

    // The pending requests are ordered by increasing frame numbers. The shutter callback and
    // result metadata are ready to be sent if all previous pending requests are ready to be sent.
    bool readyToSend = true;

    // Iterate through the pending requests to send out shutter callbacks and results that are
    // ready. Also if this result metadata belongs to a live request, notify errors for previous
    // live requests that don't have result metadata yet.
    auto iter = mPendingRequestsList.begin();
    while (iter != mPendingRequestsList.end()) {
        // Check if current pending request is ready. If it's not ready, the following pending
        // requests are also not ready.
        if (readyToSend && iter->resultMetadata == nullptr) {
            readyToSend = false;
        }

        bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;

        std::vector<camera3_stream_buffer_t> outputBuffers;

        camera3_capture_result_t result = {};
        result.frame_number = iter->frame_number;
        result.result = iter->resultMetadata;
        result.partial_result = iter->partial_result_cnt;

        // If this pending buffer has result metadata, we may be able to send out shutter callback
        // and result metadata.
        if (iter->resultMetadata != nullptr) {
            if (!readyToSend) {
                // If any of the previous pending request is not ready, this pending request is
                // also not ready to send in order to keep shutter callbacks and result metadata
                // in order.
                iter++;
                continue;
            }

            // Invoke shutter callback if not yet.
            if (!iter->shutter_notified) {
                int64_t timestamp = systemTime(CLOCK_MONOTONIC);

                // Find the timestamp in HDR+ result metadata
                camera_metadata_ro_entry_t entry;
                status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
                        ANDROID_SENSOR_TIMESTAMP, &entry);
                if (res != OK) {
                    ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                            __FUNCTION__, iter->frame_number, strerror(-res), res);
                } else {
                    timestamp = entry.data.i64[0];
                }

                camera3_notify_msg_t notify_msg = {};
                notify_msg.type = CAMERA3_MSG_SHUTTER;
                notify_msg.message.shutter.frame_number = iter->frame_number;
                notify_msg.message.shutter.timestamp = timestamp;
                orchestrateNotify(&notify_msg);
                iter->shutter_notified = true;
            }

            result.input_buffer = iter->input_buffer;

            // Prepare output buffer array
            for (auto bufferInfoIter = iter->buffers.begin();
                    bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
                if (bufferInfoIter->buffer != nullptr) {

                    QCamera3Channel *channel =
                            (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
                    uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());

                    // Check if this buffer is a dropped frame.
                    // NOTE(review): the drop list is matched against
                    // frameNumber (the frame whose metadata just arrived),
                    // not iter->frame_number (the request actually being
                    // flushed here). When an earlier pending request is sent
                    // in this same pass, its dropped buffers would be missed.
                    // Looks like a bug — confirm intended behavior.
                    auto frameDropIter = mPendingFrameDropList.begin();
                    while (frameDropIter != mPendingFrameDropList.end()) {
                        if((frameDropIter->stream_ID == streamID) &&
                                (frameDropIter->frame_number == frameNumber)) {
                            bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
                            LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
                                    streamID);
                            mPendingFrameDropList.erase(frameDropIter);
                            break;
                        } else {
                            frameDropIter++;
                        }
                    }

                    // Check buffer error status
                    bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
                            bufferInfoIter->buffer->buffer);
                    mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);

                    // Move the cached copy into the result array and free the
                    // heap copy made in handleBufferWithLock().
                    outputBuffers.push_back(*(bufferInfoIter->buffer));
                    free(bufferInfoIter->buffer);
                    bufferInfoIter->buffer = NULL;
                }
            }

            result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
            result.num_output_buffers = outputBuffers.size();
        } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
            // If the result metadata belongs to a live request, notify errors for previous pending
            // live requests.
            mPendingLiveRequest--;

            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
            result.result = dummyMetadata.release();

            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
        } else {
            iter++;
            continue;
        }

        orchestrateResult(&result);

        // For reprocessing, result metadata is the same as settings so do not free it here to
        // avoid double free.
        if (result.result != iter->settings) {
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        iter->resultMetadata = nullptr;
        iter = erasePendingRequest(iter);
    }

    // Bump pipeline depth for the requests still in flight behind this one.
    if (liveRequest) {
        for (auto &iter : mPendingRequestsList) {
            // Increment pipeline depth for the following pending requests.
            if (iter.frame_number > frameNumber) {
                iter.pipeline_depth++;
            }
        }
    }

    unblockRequestIfNecessary();
}
3696
Thierry Strudel3d639192016-09-09 11:52:26 -07003697/*===========================================================================
3698 * FUNCTION : unblockRequestIfNecessary
3699 *
3700 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3701 * that mMutex is held when this function is called.
3702 *
3703 * PARAMETERS :
3704 *
3705 * RETURN :
3706 *
3707 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Wakes one waiter on mRequestCond; mMutex is already held by the caller
    // (per the function header), so the signal is race-free.
    pthread_cond_signal(&mRequestCond);
}
3713
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003714/*===========================================================================
3715 * FUNCTION : isHdrSnapshotRequest
3716 *
3717 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3718 *
3719 * PARAMETERS : camera3 request structure
3720 *
3721 * RETURN : boolean decision variable
3722 *
3723 *==========================================================================*/
3724bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3725{
3726 if (request == NULL) {
3727 LOGE("Invalid request handle");
3728 assert(0);
3729 return false;
3730 }
3731
3732 if (!mForceHdrSnapshot) {
3733 CameraMetadata frame_settings;
3734 frame_settings = request->settings;
3735
3736 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3737 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3738 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3739 return false;
3740 }
3741 } else {
3742 return false;
3743 }
3744
3745 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3746 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3747 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3748 return false;
3749 }
3750 } else {
3751 return false;
3752 }
3753 }
3754
3755 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3756 if (request->output_buffers[i].stream->format
3757 == HAL_PIXEL_FORMAT_BLOB) {
3758 return true;
3759 }
3760 }
3761
3762 return false;
3763}
3764/*===========================================================================
3765 * FUNCTION : orchestrateRequest
3766 *
3767 * DESCRIPTION: Orchestrates a capture request from camera service
3768 *
3769 * PARAMETERS :
3770 * @request : request from framework to process
3771 *
3772 * RETURN : Error status codes
3773 *
3774 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Remember the framework's view of the request so it can be restored
    // after the internal HDR bracketing sequence rewrites it.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the single framework request into a bracketed
        // sequence of internal requests (-2x EV, 0x EV, +2x EV), each issued
        // via processCaptureRequest() under an internal frame number.
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // NOTE(review): each modified_meta.release() below transfers
        // ownership of a freshly allocated camera_metadata_t to
        // request->settings, and none of them appears to be freed before
        // being overwritten (original_settings is restored at the end).
        // This looks like a leak of up to three metadata buffers per HDR
        // snapshot — confirm whether processCaptureRequest() takes ownership
        // before adding free_camera_metadata() calls.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Metering-only internal request: no framework buffers attached.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // The -2x frame the framework will actually receive: map the
        // internal frame number back to the original framework number.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Re-clone the settings and reset exposure compensation to 0 EV.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settle request ...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ... then the actual 0x capture, which needs metadata for the
        // internal offline postprocessing.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        // Re-clone the settings and raise compensation to +2x EV.
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Settle (metering-only) ...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ... then the +2x capture with metadata.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal request: just map the framework frame number to an internal
        // one and forward.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3910
3911/*===========================================================================
3912 * FUNCTION : orchestrateResult
3913 *
3914 * DESCRIPTION: Orchestrates a capture result to camera service
3915 *
3916 * PARAMETERS :
3917 * @request : request from framework to process
3918 *
3919 * RETURN :
3920 *
3921 *==========================================================================*/
3922void QCamera3HardwareInterface::orchestrateResult(
3923 camera3_capture_result_t *result)
3924{
3925 uint32_t frameworkFrameNumber;
3926 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3927 frameworkFrameNumber);
3928 if (rc != NO_ERROR) {
3929 LOGE("Cannot find translated frameworkFrameNumber");
3930 assert(0);
3931 } else {
3932 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3933 LOGD("CAM_DEBUG Internal Request drop the result");
3934 } else {
3935 result->frame_number = frameworkFrameNumber;
3936 mCallbackOps->process_capture_result(mCallbackOps, result);
3937 }
3938 }
3939}
3940
3941/*===========================================================================
3942 * FUNCTION : orchestrateNotify
3943 *
3944 * DESCRIPTION: Orchestrates a notify to camera service
3945 *
3946 * PARAMETERS :
3947 * @request : request from framework to process
3948 *
3949 * RETURN :
3950 *
3951 *==========================================================================*/
3952void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3953{
3954 uint32_t frameworkFrameNumber;
3955 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3956 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3957 frameworkFrameNumber);
3958 if (rc != NO_ERROR) {
3959 LOGE("Cannot find translated frameworkFrameNumber");
3960 assert(0);
3961 } else {
3962 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3963 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3964 } else {
3965 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3966 mCallbackOps->notify(mCallbackOps, notify_msg);
3967 }
3968 }
3969}
3970
3971/*===========================================================================
3972 * FUNCTION : FrameNumberRegistry
3973 *
3974 * DESCRIPTION: Constructor
3975 *
3976 * PARAMETERS :
3977 *
3978 * RETURN :
3979 *
3980 *==========================================================================*/
3981FrameNumberRegistry::FrameNumberRegistry()
3982{
3983 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3984}
3985
3986/*===========================================================================
3987 * FUNCTION : ~FrameNumberRegistry
3988 *
3989 * DESCRIPTION: Destructor
3990 *
3991 * PARAMETERS :
3992 *
3993 * RETURN :
3994 *
3995 *==========================================================================*/
3996FrameNumberRegistry::~FrameNumberRegistry()
3997{
3998}
3999
4000/*===========================================================================
4001 * FUNCTION : PurgeOldEntriesLocked
4002 *
4003 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4004 *
4005 * PARAMETERS :
4006 *
4007 * RETURN : NONE
4008 *
4009 *==========================================================================*/
4010void FrameNumberRegistry::purgeOldEntriesLocked()
4011{
4012 while (_register.begin() != _register.end()) {
4013 auto itr = _register.begin();
4014 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4015 _register.erase(itr);
4016 } else {
4017 return;
4018 }
4019 }
4020}
4021
4022/*===========================================================================
4023 * FUNCTION : allocStoreInternalFrameNumber
4024 *
4025 * DESCRIPTION: Method to note down a framework request and associate a new
4026 * internal request number against it
4027 *
4028 * PARAMETERS :
4029 * @fFrameNumber: Identifier given by framework
4030 * @internalFN : Output parameter which will have the newly generated internal
4031 * entry
4032 *
4033 * RETURN : Error code
4034 *
4035 *==========================================================================*/
4036int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4037 uint32_t &internalFrameNumber)
4038{
4039 Mutex::Autolock lock(mRegistryLock);
4040 internalFrameNumber = _nextFreeInternalNumber++;
4041 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4042 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4043 purgeOldEntriesLocked();
4044 return NO_ERROR;
4045}
4046
4047/*===========================================================================
4048 * FUNCTION : generateStoreInternalFrameNumber
4049 *
4050 * DESCRIPTION: Method to associate a new internal request number independent
4051 * of any associate with framework requests
4052 *
4053 * PARAMETERS :
4054 * @internalFrame#: Output parameter which will have the newly generated internal
4055 *
4056 *
4057 * RETURN : Error code
4058 *
4059 *==========================================================================*/
4060int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4061{
4062 Mutex::Autolock lock(mRegistryLock);
4063 internalFrameNumber = _nextFreeInternalNumber++;
4064 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4065 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4066 purgeOldEntriesLocked();
4067 return NO_ERROR;
4068}
4069
4070/*===========================================================================
4071 * FUNCTION : getFrameworkFrameNumber
4072 *
4073 * DESCRIPTION: Method to query the framework framenumber given an internal #
4074 *
4075 * PARAMETERS :
4076 * @internalFrame#: Internal reference
4077 * @frameworkframenumber: Output parameter holding framework frame entry
4078 *
4079 * RETURN : Error code
4080 *
4081 *==========================================================================*/
4082int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4083 uint32_t &frameworkFrameNumber)
4084{
4085 Mutex::Autolock lock(mRegistryLock);
4086 auto itr = _register.find(internalFrameNumber);
4087 if (itr == _register.end()) {
4088 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
4089 return -ENOENT;
4090 }
4091
4092 frameworkFrameNumber = itr->second;
4093 purgeOldEntriesLocked();
4094 return NO_ERROR;
4095}
Thierry Strudel3d639192016-09-09 11:52:26 -07004096
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004097status_t QCamera3HardwareInterface::fillPbStreamConfig(
4098 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4099 QCamera3Channel *channel, uint32_t streamIndex) {
4100 if (config == nullptr) {
4101 LOGE("%s: config is null", __FUNCTION__);
4102 return BAD_VALUE;
4103 }
4104
4105 if (channel == nullptr) {
4106 LOGE("%s: channel is null", __FUNCTION__);
4107 return BAD_VALUE;
4108 }
4109
4110 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4111 if (stream == nullptr) {
4112 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4113 return NAME_NOT_FOUND;
4114 }
4115
4116 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4117 if (streamInfo == nullptr) {
4118 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4119 return NAME_NOT_FOUND;
4120 }
4121
4122 config->id = pbStreamId;
4123 config->image.width = streamInfo->dim.width;
4124 config->image.height = streamInfo->dim.height;
4125 config->image.padding = 0;
4126 config->image.format = pbStreamFormat;
4127
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004128 uint32_t totalPlaneSize = 0;
4129
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004130 // Fill plane information.
4131 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4132 pbcamera::PlaneConfiguration plane;
4133 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4134 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4135 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004136
4137 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004138 }
4139
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004140 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004141 return OK;
4142}
4143
Thierry Strudel3d639192016-09-09 11:52:26 -07004144/*===========================================================================
4145 * FUNCTION : processCaptureRequest
4146 *
4147 * DESCRIPTION: process a capture request from camera service
4148 *
4149 * PARAMETERS :
4150 * @request : request from framework to process
4151 *
4152 * RETURN :
4153 *
4154 *==========================================================================*/
4155int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004156 camera3_capture_request_t *request,
4157 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004158{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004159 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004160 int rc = NO_ERROR;
4161 int32_t request_id;
4162 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004163 bool isVidBufRequested = false;
4164 camera3_stream_buffer_t *pInputBuffer = NULL;
4165
4166 pthread_mutex_lock(&mMutex);
4167
4168 // Validate current state
4169 switch (mState) {
4170 case CONFIGURED:
4171 case STARTED:
4172 /* valid state */
4173 break;
4174
4175 case ERROR:
4176 pthread_mutex_unlock(&mMutex);
4177 handleCameraDeviceError();
4178 return -ENODEV;
4179
4180 default:
4181 LOGE("Invalid state %d", mState);
4182 pthread_mutex_unlock(&mMutex);
4183 return -ENODEV;
4184 }
4185
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004186 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004187 if (rc != NO_ERROR) {
4188 LOGE("incoming request is not valid");
4189 pthread_mutex_unlock(&mMutex);
4190 return rc;
4191 }
4192
4193 meta = request->settings;
4194
4195 // For first capture request, send capture intent, and
4196 // stream on all streams
4197 if (mState == CONFIGURED) {
4198 // send an unconfigure to the backend so that the isp
4199 // resources are deallocated
4200 if (!mFirstConfiguration) {
4201 cam_stream_size_info_t stream_config_info;
4202 int32_t hal_version = CAM_HAL_V3;
4203 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4204 stream_config_info.buffer_info.min_buffers =
4205 MIN_INFLIGHT_REQUESTS;
4206 stream_config_info.buffer_info.max_buffers =
4207 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4208 clear_metadata_buffer(mParameters);
4209 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4210 CAM_INTF_PARM_HAL_VERSION, hal_version);
4211 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4212 CAM_INTF_META_STREAM_INFO, stream_config_info);
4213 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4214 mParameters);
4215 if (rc < 0) {
4216 LOGE("set_parms for unconfigure failed");
4217 pthread_mutex_unlock(&mMutex);
4218 return rc;
4219 }
4220 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004221 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004222 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004223 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004224 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004225 property_get("persist.camera.is_type", is_type_value, "4");
4226 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4227 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4228 property_get("persist.camera.is_type_preview", is_type_value, "4");
4229 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4230 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004231
4232 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4233 int32_t hal_version = CAM_HAL_V3;
4234 uint8_t captureIntent =
4235 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4236 mCaptureIntent = captureIntent;
4237 clear_metadata_buffer(mParameters);
4238 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4239 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4240 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004241 if (mFirstConfiguration) {
4242 // configure instant AEC
4243 // Instant AEC is a session based parameter and it is needed only
4244 // once per complete session after open camera.
4245 // i.e. This is set only once for the first capture request, after open camera.
4246 setInstantAEC(meta);
4247 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004248 uint8_t fwkVideoStabMode=0;
4249 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4250 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4251 }
4252
4253 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4254 // turn it on for video/preview
4255 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4256 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004257 int32_t vsMode;
4258 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4260 rc = BAD_VALUE;
4261 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004262 LOGD("setEis %d", setEis);
4263 bool eis3Supported = false;
4264 size_t count = IS_TYPE_MAX;
4265 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4266 for (size_t i = 0; i < count; i++) {
4267 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4268 eis3Supported = true;
4269 break;
4270 }
4271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004272
4273 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004274 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004275 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4276 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004277 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4278 is_type = isTypePreview;
4279 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4280 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4281 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004282 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004283 } else {
4284 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004285 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004286 } else {
4287 is_type = IS_TYPE_NONE;
4288 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004289 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004290 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004291 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4292 }
4293 }
4294
4295 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4296 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4297
4298 int32_t tintless_value = 1;
4299 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4300 CAM_INTF_PARM_TINTLESS, tintless_value);
4301 //Disable CDS for HFR mode or if DIS/EIS is on.
4302 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4303 //after every configure_stream
4304 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4305 (m_bIsVideo)) {
4306 int32_t cds = CAM_CDS_MODE_OFF;
4307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4308 CAM_INTF_PARM_CDS_MODE, cds))
4309 LOGE("Failed to disable CDS for HFR mode");
4310
4311 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004312
4313 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4314 uint8_t* use_av_timer = NULL;
4315
4316 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004317 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004318 use_av_timer = &m_debug_avtimer;
4319 }
4320 else{
4321 use_av_timer =
4322 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004323 if (use_av_timer) {
4324 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4325 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004326 }
4327
4328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4329 rc = BAD_VALUE;
4330 }
4331 }
4332
Thierry Strudel3d639192016-09-09 11:52:26 -07004333 setMobicat();
4334
4335 /* Set fps and hfr mode while sending meta stream info so that sensor
4336 * can configure appropriate streaming mode */
4337 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004338 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4339 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004340 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4341 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004342 if (rc == NO_ERROR) {
4343 int32_t max_fps =
4344 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004345 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004346 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4347 }
4348 /* For HFR, more buffers are dequeued upfront to improve the performance */
4349 if (mBatchSize) {
4350 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4351 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4352 }
4353 }
4354 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004355 LOGE("setHalFpsRange failed");
4356 }
4357 }
4358 if (meta.exists(ANDROID_CONTROL_MODE)) {
4359 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4360 rc = extractSceneMode(meta, metaMode, mParameters);
4361 if (rc != NO_ERROR) {
4362 LOGE("extractSceneMode failed");
4363 }
4364 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004365 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004366
Thierry Strudel04e026f2016-10-10 11:27:36 -07004367 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4368 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4369 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4370 rc = setVideoHdrMode(mParameters, vhdr);
4371 if (rc != NO_ERROR) {
4372 LOGE("setVideoHDR is failed");
4373 }
4374 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004375
Thierry Strudel3d639192016-09-09 11:52:26 -07004376 //TODO: validate the arguments, HSV scenemode should have only the
4377 //advertised fps ranges
4378
4379 /*set the capture intent, hal version, tintless, stream info,
4380 *and disenable parameters to the backend*/
4381 LOGD("set_parms META_STREAM_INFO " );
4382 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4383 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004384 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004385 mStreamConfigInfo.type[i],
4386 mStreamConfigInfo.stream_sizes[i].width,
4387 mStreamConfigInfo.stream_sizes[i].height,
4388 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004389 mStreamConfigInfo.format[i],
4390 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004391 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004392
Thierry Strudel3d639192016-09-09 11:52:26 -07004393 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4394 mParameters);
4395 if (rc < 0) {
4396 LOGE("set_parms failed for hal version, stream info");
4397 }
4398
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004399 cam_sensor_mode_info_t sensor_mode_info;
4400 memset(&sensor_mode_info, 0, sizeof(sensor_mode_info));
4401 rc = getSensorModeInfo(sensor_mode_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07004402 if (rc != NO_ERROR) {
4403 LOGE("Failed to get sensor output size");
4404 pthread_mutex_unlock(&mMutex);
4405 goto error_exit;
4406 }
4407
4408 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4409 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004410 sensor_mode_info.active_array_size.width,
4411 sensor_mode_info.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004412
4413 /* Set batchmode before initializing channel. Since registerBuffer
4414 * internally initializes some of the channels, better set batchmode
4415 * even before first register buffer */
4416 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4417 it != mStreamInfo.end(); it++) {
4418 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4419 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4420 && mBatchSize) {
4421 rc = channel->setBatchSize(mBatchSize);
4422 //Disable per frame map unmap for HFR/batchmode case
4423 rc |= channel->setPerFrameMapUnmap(false);
4424 if (NO_ERROR != rc) {
4425 LOGE("Channel init failed %d", rc);
4426 pthread_mutex_unlock(&mMutex);
4427 goto error_exit;
4428 }
4429 }
4430 }
4431
4432 //First initialize all streams
4433 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4434 it != mStreamInfo.end(); it++) {
4435 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4436 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4437 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004438 setEis) {
4439 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4440 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4441 is_type = mStreamConfigInfo.is_type[i];
4442 break;
4443 }
4444 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004445 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004446 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004447 rc = channel->initialize(IS_TYPE_NONE);
4448 }
4449 if (NO_ERROR != rc) {
4450 LOGE("Channel initialization failed %d", rc);
4451 pthread_mutex_unlock(&mMutex);
4452 goto error_exit;
4453 }
4454 }
4455
4456 if (mRawDumpChannel) {
4457 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4458 if (rc != NO_ERROR) {
4459 LOGE("Error: Raw Dump Channel init failed");
4460 pthread_mutex_unlock(&mMutex);
4461 goto error_exit;
4462 }
4463 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004464 if (mHdrPlusRawSrcChannel) {
4465 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4466 if (rc != NO_ERROR) {
4467 LOGE("Error: HDR+ RAW Source Channel init failed");
4468 pthread_mutex_unlock(&mMutex);
4469 goto error_exit;
4470 }
4471 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004472 if (mSupportChannel) {
4473 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4474 if (rc < 0) {
4475 LOGE("Support channel initialization failed");
4476 pthread_mutex_unlock(&mMutex);
4477 goto error_exit;
4478 }
4479 }
4480 if (mAnalysisChannel) {
4481 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4482 if (rc < 0) {
4483 LOGE("Analysis channel initialization failed");
4484 pthread_mutex_unlock(&mMutex);
4485 goto error_exit;
4486 }
4487 }
4488 if (mDummyBatchChannel) {
4489 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4490 if (rc < 0) {
4491 LOGE("mDummyBatchChannel setBatchSize failed");
4492 pthread_mutex_unlock(&mMutex);
4493 goto error_exit;
4494 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004495 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004496 if (rc < 0) {
4497 LOGE("mDummyBatchChannel initialization failed");
4498 pthread_mutex_unlock(&mMutex);
4499 goto error_exit;
4500 }
4501 }
4502
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004503 // Configure stream for HDR+.
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004504 if (mHdrPlusClient != nullptr) {
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004505 rc = configureHdrPlusStreamsLocked(sensor_mode_info);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004506 if (rc != OK) {
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004507 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004508 pthread_mutex_unlock(&mMutex);
4509 goto error_exit;
4510 }
4511 }
4512
Thierry Strudel3d639192016-09-09 11:52:26 -07004513 // Set bundle info
4514 rc = setBundleInfo();
4515 if (rc < 0) {
4516 LOGE("setBundleInfo failed %d", rc);
4517 pthread_mutex_unlock(&mMutex);
4518 goto error_exit;
4519 }
4520
4521 //update settings from app here
4522 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4523 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4524 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4525 }
4526 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4527 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4528 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4529 }
4530 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4531 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4532 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4533
4534 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4535 (mLinkedCameraId != mCameraId) ) {
4536 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4537 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004538 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004539 goto error_exit;
4540 }
4541 }
4542
4543 // add bundle related cameras
4544 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4545 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004546 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4547 &m_pDualCamCmdPtr->bundle_info;
4548 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004549 if (mIsDeviceLinked)
4550 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4551 else
4552 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4553
4554 pthread_mutex_lock(&gCamLock);
4555
4556 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4557 LOGE("Dualcam: Invalid Session Id ");
4558 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004559 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004560 goto error_exit;
4561 }
4562
4563 if (mIsMainCamera == 1) {
4564 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4565 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004566 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004567 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004568 // related session id should be session id of linked session
4569 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4570 } else {
4571 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4572 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004573 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004574 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4576 }
4577 pthread_mutex_unlock(&gCamLock);
4578
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004579 rc = mCameraHandle->ops->set_dual_cam_cmd(
4580 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004581 if (rc < 0) {
4582 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004583 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004584 goto error_exit;
4585 }
4586 }
4587
4588 //Then start them.
4589 LOGH("Start META Channel");
4590 rc = mMetadataChannel->start();
4591 if (rc < 0) {
4592 LOGE("META channel start failed");
4593 pthread_mutex_unlock(&mMutex);
4594 goto error_exit;
4595 }
4596
4597 if (mAnalysisChannel) {
4598 rc = mAnalysisChannel->start();
4599 if (rc < 0) {
4600 LOGE("Analysis channel start failed");
4601 mMetadataChannel->stop();
4602 pthread_mutex_unlock(&mMutex);
4603 goto error_exit;
4604 }
4605 }
4606
4607 if (mSupportChannel) {
4608 rc = mSupportChannel->start();
4609 if (rc < 0) {
4610 LOGE("Support channel start failed");
4611 mMetadataChannel->stop();
4612 /* Although support and analysis are mutually exclusive today
4613 adding it in anycase for future proofing */
4614 if (mAnalysisChannel) {
4615 mAnalysisChannel->stop();
4616 }
4617 pthread_mutex_unlock(&mMutex);
4618 goto error_exit;
4619 }
4620 }
4621 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4622 it != mStreamInfo.end(); it++) {
4623 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4624 LOGH("Start Processing Channel mask=%d",
4625 channel->getStreamTypeMask());
4626 rc = channel->start();
4627 if (rc < 0) {
4628 LOGE("channel start failed");
4629 pthread_mutex_unlock(&mMutex);
4630 goto error_exit;
4631 }
4632 }
4633
4634 if (mRawDumpChannel) {
4635 LOGD("Starting raw dump stream");
4636 rc = mRawDumpChannel->start();
4637 if (rc != NO_ERROR) {
4638 LOGE("Error Starting Raw Dump Channel");
4639 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4640 it != mStreamInfo.end(); it++) {
4641 QCamera3Channel *channel =
4642 (QCamera3Channel *)(*it)->stream->priv;
4643 LOGH("Stopping Processing Channel mask=%d",
4644 channel->getStreamTypeMask());
4645 channel->stop();
4646 }
4647 if (mSupportChannel)
4648 mSupportChannel->stop();
4649 if (mAnalysisChannel) {
4650 mAnalysisChannel->stop();
4651 }
4652 mMetadataChannel->stop();
4653 pthread_mutex_unlock(&mMutex);
4654 goto error_exit;
4655 }
4656 }
4657
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004658 if (mHdrPlusRawSrcChannel) {
4659 LOGD("Starting HDR+ RAW stream");
4660 rc = mHdrPlusRawSrcChannel->start();
4661 if (rc != NO_ERROR) {
4662 LOGE("Error Starting HDR+ RAW Channel");
4663 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4664 it != mStreamInfo.end(); it++) {
4665 QCamera3Channel *channel =
4666 (QCamera3Channel *)(*it)->stream->priv;
4667 LOGH("Stopping Processing Channel mask=%d",
4668 channel->getStreamTypeMask());
4669 channel->stop();
4670 }
4671 if (mSupportChannel)
4672 mSupportChannel->stop();
4673 if (mAnalysisChannel) {
4674 mAnalysisChannel->stop();
4675 }
4676 if (mRawDumpChannel) {
4677 mRawDumpChannel->stop();
4678 }
4679 mMetadataChannel->stop();
4680 pthread_mutex_unlock(&mMutex);
4681 goto error_exit;
4682 }
4683 }
4684
Thierry Strudel3d639192016-09-09 11:52:26 -07004685 if (mChannelHandle) {
4686
4687 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4688 mChannelHandle);
4689 if (rc != NO_ERROR) {
4690 LOGE("start_channel failed %d", rc);
4691 pthread_mutex_unlock(&mMutex);
4692 goto error_exit;
4693 }
4694 }
4695
4696 goto no_error;
4697error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004698 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004699 return rc;
4700no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004701 mWokenUpByDaemon = false;
4702 mPendingLiveRequest = 0;
4703 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004704 }
4705
4706 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004707 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004708
4709 if (mFlushPerf) {
4710 //we cannot accept any requests during flush
4711 LOGE("process_capture_request cannot proceed during flush");
4712 pthread_mutex_unlock(&mMutex);
4713 return NO_ERROR; //should return an error
4714 }
4715
4716 if (meta.exists(ANDROID_REQUEST_ID)) {
4717 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4718 mCurrentRequestId = request_id;
4719 LOGD("Received request with id: %d", request_id);
4720 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4721 LOGE("Unable to find request id field, \
4722 & no previous id available");
4723 pthread_mutex_unlock(&mMutex);
4724 return NAME_NOT_FOUND;
4725 } else {
4726 LOGD("Re-using old request id");
4727 request_id = mCurrentRequestId;
4728 }
4729
4730 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4731 request->num_output_buffers,
4732 request->input_buffer,
4733 frameNumber);
4734 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004735 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004736 int blob_request = 0;
4737 uint32_t snapshotStreamId = 0;
4738 for (size_t i = 0; i < request->num_output_buffers; i++) {
4739 const camera3_stream_buffer_t& output = request->output_buffers[i];
4740 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4741
4742 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004743 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004744 blob_request = 1;
4745 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4746 }
4747
4748 if (output.acquire_fence != -1) {
4749 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4750 close(output.acquire_fence);
4751 if (rc != OK) {
4752 LOGE("sync wait failed %d", rc);
4753 pthread_mutex_unlock(&mMutex);
4754 return rc;
4755 }
4756 }
4757
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004758 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004760
4761 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4762 isVidBufRequested = true;
4763 }
4764 }
4765
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004766 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4767 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4768 itr++) {
4769 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4770 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4771 channel->getStreamID(channel->getStreamTypeMask());
4772
4773 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4774 isVidBufRequested = true;
4775 }
4776 }
4777
Thierry Strudel3d639192016-09-09 11:52:26 -07004778 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004779 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004780 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 }
4782 if (blob_request && mRawDumpChannel) {
4783 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004784 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004785 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004786 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004787 }
4788
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004789 {
4790 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4791 // Request a RAW buffer if
4792 // 1. mHdrPlusRawSrcChannel is valid.
4793 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
4794 // 3. There is no pending HDR+ request.
4795 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
4796 mHdrPlusPendingRequests.size() == 0) {
4797 streamsArray.stream_request[streamsArray.num_streams].streamID =
4798 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
4799 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4800 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004801 }
4802
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004803 //extract capture intent
4804 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4805 mCaptureIntent =
4806 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4807 }
4808
4809 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4810 mCacMode =
4811 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4812 }
4813
4814 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004815 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004816
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004817 // If this request has a still capture intent, try to submit an HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004818 if (mHdrPlusClient != nullptr &&
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -08004819 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
4820 hdrPlusRequest = trySubmittingHdrPlusRequest(&pendingHdrPlusRequest, *request, meta);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004821 }
4822
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004823 if (hdrPlusRequest) {
4824 // For a HDR+ request, just set the frame parameters.
4825 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
4826 if (rc < 0) {
4827 LOGE("fail to set frame parameters");
4828 pthread_mutex_unlock(&mMutex);
4829 return rc;
4830 }
4831 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 /* Parse the settings:
4833 * - For every request in NORMAL MODE
4834 * - For every request in HFR mode during preview only case
4835 * - For first request of every batch in HFR mode during video
4836 * recording. In batchmode the same settings except frame number is
4837 * repeated in each request of the batch.
4838 */
4839 if (!mBatchSize ||
4840 (mBatchSize && !isVidBufRequested) ||
4841 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004842 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 if (rc < 0) {
4844 LOGE("fail to set frame parameters");
4845 pthread_mutex_unlock(&mMutex);
4846 return rc;
4847 }
4848 }
4849 /* For batchMode HFR, setFrameParameters is not called for every
4850 * request. But only frame number of the latest request is parsed.
4851 * Keep track of first and last frame numbers in a batch so that
4852 * metadata for the frame numbers of batch can be duplicated in
4853 * handleBatchMetadta */
4854 if (mBatchSize) {
4855 if (!mToBeQueuedVidBufs) {
4856 //start of the batch
4857 mFirstFrameNumberInBatch = request->frame_number;
4858 }
4859 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4860 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4861 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004862 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 return BAD_VALUE;
4864 }
4865 }
4866 if (mNeedSensorRestart) {
4867 /* Unlock the mutex as restartSensor waits on the channels to be
4868 * stopped, which in turn calls stream callback functions -
4869 * handleBufferWithLock and handleMetadataWithLock */
4870 pthread_mutex_unlock(&mMutex);
4871 rc = dynamicUpdateMetaStreamInfo();
4872 if (rc != NO_ERROR) {
4873 LOGE("Restarting the sensor failed");
4874 return BAD_VALUE;
4875 }
4876 mNeedSensorRestart = false;
4877 pthread_mutex_lock(&mMutex);
4878 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004879 if(mResetInstantAEC) {
4880 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4881 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4882 mResetInstantAEC = false;
4883 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004884 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004885 if (request->input_buffer->acquire_fence != -1) {
4886 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4887 close(request->input_buffer->acquire_fence);
4888 if (rc != OK) {
4889 LOGE("input buffer sync wait failed %d", rc);
4890 pthread_mutex_unlock(&mMutex);
4891 return rc;
4892 }
4893 }
4894 }
4895
4896 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4897 mLastCustIntentFrmNum = frameNumber;
4898 }
4899 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004900 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07004901 pendingRequestIterator latestRequest;
4902 pendingRequest.frame_number = frameNumber;
4903 pendingRequest.num_buffers = request->num_output_buffers;
4904 pendingRequest.request_id = request_id;
4905 pendingRequest.blob_request = blob_request;
4906 pendingRequest.timestamp = 0;
4907 pendingRequest.bUrgentReceived = 0;
4908 if (request->input_buffer) {
4909 pendingRequest.input_buffer =
4910 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4911 *(pendingRequest.input_buffer) = *(request->input_buffer);
4912 pInputBuffer = pendingRequest.input_buffer;
4913 } else {
4914 pendingRequest.input_buffer = NULL;
4915 pInputBuffer = NULL;
4916 }
4917
4918 pendingRequest.pipeline_depth = 0;
4919 pendingRequest.partial_result_cnt = 0;
4920 extractJpegMetadata(mCurJpegMeta, request);
4921 pendingRequest.jpegMetadata = mCurJpegMeta;
4922 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4923 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004925 /* DevCamDebug metadata processCaptureRequest */
4926 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4927 mDevCamDebugMetaEnable =
4928 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4929 }
4930 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4931 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004932
4933 //extract CAC info
4934 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4935 mCacMode =
4936 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4937 }
4938 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004939 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07004940
4941 PendingBuffersInRequest bufsForCurRequest;
4942 bufsForCurRequest.frame_number = frameNumber;
4943 // Mark current timestamp for the new request
4944 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004945 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07004946
Chien-Yu Chen92724a82017-01-06 11:50:30 -08004947 if (hdrPlusRequest) {
4948 // Save settings for this request.
4949 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
4950 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
4951
4952 // Add to pending HDR+ request queue.
4953 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4954 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
4955
4956 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
4957 }
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 for (size_t i = 0; i < request->num_output_buffers; i++) {
4960 RequestedBufferInfo requestedBuf;
4961 memset(&requestedBuf, 0, sizeof(requestedBuf));
4962 requestedBuf.stream = request->output_buffers[i].stream;
4963 requestedBuf.buffer = NULL;
4964 pendingRequest.buffers.push_back(requestedBuf);
4965
4966 // Add to buffer handle the pending buffers list
4967 PendingBufferInfo bufferInfo;
4968 bufferInfo.buffer = request->output_buffers[i].buffer;
4969 bufferInfo.stream = request->output_buffers[i].stream;
4970 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4971 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4972 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4973 frameNumber, bufferInfo.buffer,
4974 channel->getStreamTypeMask(), bufferInfo.stream->format);
4975 }
4976 // Add this request packet into mPendingBuffersMap
4977 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4978 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4979 mPendingBuffersMap.get_num_overall_buffers());
4980
4981 latestRequest = mPendingRequestsList.insert(
4982 mPendingRequestsList.end(), pendingRequest);
4983 if(mFlush) {
4984 LOGI("mFlush is true");
4985 pthread_mutex_unlock(&mMutex);
4986 return NO_ERROR;
4987 }
4988
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004989 // If this is not an HDR+ request, send the request to metadata and each output buffer's
4990 // channel.
4991 if (!hdrPlusRequest) {
4992 int indexUsed;
4993 // Notify metadata channel we receive a request
4994 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004995
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004996 if(request->input_buffer != NULL){
4997 LOGD("Input request, frame_number %d", frameNumber);
4998 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4999 if (NO_ERROR != rc) {
5000 LOGE("fail to set reproc parameters");
5001 pthread_mutex_unlock(&mMutex);
5002 return rc;
5003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005004 }
5005
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005006 // Call request on other streams
5007 uint32_t streams_need_metadata = 0;
5008 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5009 for (size_t i = 0; i < request->num_output_buffers; i++) {
5010 const camera3_stream_buffer_t& output = request->output_buffers[i];
5011 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5012
5013 if (channel == NULL) {
5014 LOGW("invalid channel pointer for stream");
5015 continue;
5016 }
5017
5018 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5019 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5020 output.buffer, request->input_buffer, frameNumber);
5021 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005022 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005023 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5024 if (rc < 0) {
5025 LOGE("Fail to request on picture channel");
5026 pthread_mutex_unlock(&mMutex);
5027 return rc;
5028 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005029 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005030 LOGD("snapshot request with buffer %p, frame_number %d",
5031 output.buffer, frameNumber);
5032 if (!request->settings) {
5033 rc = channel->request(output.buffer, frameNumber,
5034 NULL, mPrevParameters, indexUsed);
5035 } else {
5036 rc = channel->request(output.buffer, frameNumber,
5037 NULL, mParameters, indexUsed);
5038 }
5039 if (rc < 0) {
5040 LOGE("Fail to request on picture channel");
5041 pthread_mutex_unlock(&mMutex);
5042 return rc;
5043 }
5044
5045 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5046 uint32_t j = 0;
5047 for (j = 0; j < streamsArray.num_streams; j++) {
5048 if (streamsArray.stream_request[j].streamID == streamId) {
5049 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5050 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5051 else
5052 streamsArray.stream_request[j].buf_index = indexUsed;
5053 break;
5054 }
5055 }
5056 if (j == streamsArray.num_streams) {
5057 LOGE("Did not find matching stream to update index");
5058 assert(0);
5059 }
5060
5061 pendingBufferIter->need_metadata = true;
5062 streams_need_metadata++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005063 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005064 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5065 bool needMetadata = false;
5066 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5067 rc = yuvChannel->request(output.buffer, frameNumber,
5068 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5069 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005070 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005071 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 pthread_mutex_unlock(&mMutex);
5073 return rc;
5074 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005075
5076 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5077 uint32_t j = 0;
5078 for (j = 0; j < streamsArray.num_streams; j++) {
5079 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005080 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5081 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5082 else
5083 streamsArray.stream_request[j].buf_index = indexUsed;
5084 break;
5085 }
5086 }
5087 if (j == streamsArray.num_streams) {
5088 LOGE("Did not find matching stream to update index");
5089 assert(0);
5090 }
5091
5092 pendingBufferIter->need_metadata = needMetadata;
5093 if (needMetadata)
5094 streams_need_metadata += 1;
5095 LOGD("calling YUV channel request, need_metadata is %d",
5096 needMetadata);
5097 } else {
5098 LOGD("request with buffer %p, frame_number %d",
5099 output.buffer, frameNumber);
5100
5101 rc = channel->request(output.buffer, frameNumber, indexUsed);
5102
5103 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5104 uint32_t j = 0;
5105 for (j = 0; j < streamsArray.num_streams; j++) {
5106 if (streamsArray.stream_request[j].streamID == streamId) {
5107 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5108 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5109 else
5110 streamsArray.stream_request[j].buf_index = indexUsed;
5111 break;
5112 }
5113 }
5114 if (j == streamsArray.num_streams) {
5115 LOGE("Did not find matching stream to update index");
5116 assert(0);
5117 }
5118
5119 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5120 && mBatchSize) {
5121 mToBeQueuedVidBufs++;
5122 if (mToBeQueuedVidBufs == mBatchSize) {
5123 channel->queueBatchBuf();
5124 }
5125 }
5126 if (rc < 0) {
5127 LOGE("request failed");
5128 pthread_mutex_unlock(&mMutex);
5129 return rc;
5130 }
5131 }
5132 pendingBufferIter++;
5133 }
5134
5135 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5136 itr++) {
5137 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5138
5139 if (channel == NULL) {
5140 LOGE("invalid channel pointer for stream");
5141 assert(0);
5142 return BAD_VALUE;
5143 }
5144
5145 InternalRequest requestedStream;
5146 requestedStream = (*itr);
5147
5148
5149 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5150 LOGD("snapshot request internally input buffer %p, frame_number %d",
5151 request->input_buffer, frameNumber);
5152 if(request->input_buffer != NULL){
5153 rc = channel->request(NULL, frameNumber,
5154 pInputBuffer, &mReprocMeta, indexUsed, true,
5155 requestedStream.meteringOnly);
5156 if (rc < 0) {
5157 LOGE("Fail to request on picture channel");
5158 pthread_mutex_unlock(&mMutex);
5159 return rc;
5160 }
5161 } else {
5162 LOGD("snapshot request with frame_number %d", frameNumber);
5163 if (!request->settings) {
5164 rc = channel->request(NULL, frameNumber,
5165 NULL, mPrevParameters, indexUsed, true,
5166 requestedStream.meteringOnly);
5167 } else {
5168 rc = channel->request(NULL, frameNumber,
5169 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5170 }
5171 if (rc < 0) {
5172 LOGE("Fail to request on picture channel");
5173 pthread_mutex_unlock(&mMutex);
5174 return rc;
5175 }
5176
5177 if ((*itr).meteringOnly != 1) {
5178 requestedStream.need_metadata = 1;
5179 streams_need_metadata++;
5180 }
5181 }
5182
5183 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5184 uint32_t j = 0;
5185 for (j = 0; j < streamsArray.num_streams; j++) {
5186 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005187 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5188 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5189 else
5190 streamsArray.stream_request[j].buf_index = indexUsed;
5191 break;
5192 }
5193 }
5194 if (j == streamsArray.num_streams) {
5195 LOGE("Did not find matching stream to update index");
5196 assert(0);
5197 }
5198
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005199 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005200 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005201 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005202 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005203 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005204 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005205 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005206
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005207 //If 2 streams have need_metadata set to true, fail the request, unless
5208 //we copy/reference count the metadata buffer
5209 if (streams_need_metadata > 1) {
5210 LOGE("not supporting request in which two streams requires"
5211 " 2 HAL metadata for reprocessing");
5212 pthread_mutex_unlock(&mMutex);
5213 return -EINVAL;
5214 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005215
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005216 if (request->input_buffer == NULL) {
5217 /* Set the parameters to backend:
5218 * - For every request in NORMAL MODE
5219 * - For every request in HFR mode during preview only case
5220 * - Once every batch in HFR mode during video recording
5221 */
5222 if (!mBatchSize ||
5223 (mBatchSize && !isVidBufRequested) ||
5224 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5225 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5226 mBatchSize, isVidBufRequested,
5227 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005228
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005229 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5230 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5231 uint32_t m = 0;
5232 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5233 if (streamsArray.stream_request[k].streamID ==
5234 mBatchedStreamsArray.stream_request[m].streamID)
5235 break;
5236 }
5237 if (m == mBatchedStreamsArray.num_streams) {
5238 mBatchedStreamsArray.stream_request\
5239 [mBatchedStreamsArray.num_streams].streamID =
5240 streamsArray.stream_request[k].streamID;
5241 mBatchedStreamsArray.stream_request\
5242 [mBatchedStreamsArray.num_streams].buf_index =
5243 streamsArray.stream_request[k].buf_index;
5244 mBatchedStreamsArray.num_streams =
5245 mBatchedStreamsArray.num_streams + 1;
5246 }
5247 }
5248 streamsArray = mBatchedStreamsArray;
5249 }
5250 /* Update stream id of all the requested buffers */
5251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5252 streamsArray)) {
5253 LOGE("Failed to set stream type mask in the parameters");
5254 return BAD_VALUE;
5255 }
5256
5257 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5258 mParameters);
5259 if (rc < 0) {
5260 LOGE("set_parms failed");
5261 }
5262 /* reset to zero coz, the batch is queued */
5263 mToBeQueuedVidBufs = 0;
5264 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5265 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5266 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005267 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5268 uint32_t m = 0;
5269 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5270 if (streamsArray.stream_request[k].streamID ==
5271 mBatchedStreamsArray.stream_request[m].streamID)
5272 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005273 }
5274 if (m == mBatchedStreamsArray.num_streams) {
5275 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5276 streamID = streamsArray.stream_request[k].streamID;
5277 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5278 buf_index = streamsArray.stream_request[k].buf_index;
5279 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5280 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005281 }
5282 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005283 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005285 }
5286
5287 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5288
5289 mState = STARTED;
5290 // Added a timed condition wait
5291 struct timespec ts;
5292 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005293 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 if (rc < 0) {
5295 isValidTimeout = 0;
5296 LOGE("Error reading the real time clock!!");
5297 }
5298 else {
5299 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005300 int64_t timeout = 5;
5301 {
5302 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5303 // If there is a pending HDR+ request, the following requests may be blocked until the
5304 // HDR+ request is done. So allow a longer timeout.
5305 if (mHdrPlusPendingRequests.size() > 0) {
5306 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5307 }
5308 }
5309 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 }
5311 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005312 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 (mState != ERROR) && (mState != DEINIT)) {
5314 if (!isValidTimeout) {
5315 LOGD("Blocking on conditional wait");
5316 pthread_cond_wait(&mRequestCond, &mMutex);
5317 }
5318 else {
5319 LOGD("Blocking on timed conditional wait");
5320 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5321 if (rc == ETIMEDOUT) {
5322 rc = -ENODEV;
5323 LOGE("Unblocked on timeout!!!!");
5324 break;
5325 }
5326 }
5327 LOGD("Unblocked");
5328 if (mWokenUpByDaemon) {
5329 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005330 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005331 break;
5332 }
5333 }
5334 pthread_mutex_unlock(&mMutex);
5335
5336 return rc;
5337}
5338
5339/*===========================================================================
5340 * FUNCTION : dump
5341 *
5342 * DESCRIPTION:
5343 *
5344 * PARAMETERS :
5345 *
5346 *
5347 * RETURN :
5348 *==========================================================================*/
5349void QCamera3HardwareInterface::dump(int fd)
5350{
5351 pthread_mutex_lock(&mMutex);
5352 dprintf(fd, "\n Camera HAL3 information Begin \n");
5353
5354 dprintf(fd, "\nNumber of pending requests: %zu \n",
5355 mPendingRequestsList.size());
5356 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5357 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5358 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5359 for(pendingRequestIterator i = mPendingRequestsList.begin();
5360 i != mPendingRequestsList.end(); i++) {
5361 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5362 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5363 i->input_buffer);
5364 }
5365 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5366 mPendingBuffersMap.get_num_overall_buffers());
5367 dprintf(fd, "-------+------------------\n");
5368 dprintf(fd, " Frame | Stream type mask \n");
5369 dprintf(fd, "-------+------------------\n");
5370 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5371 for(auto &j : req.mPendingBufferList) {
5372 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5373 dprintf(fd, " %5d | %11d \n",
5374 req.frame_number, channel->getStreamTypeMask());
5375 }
5376 }
5377 dprintf(fd, "-------+------------------\n");
5378
5379 dprintf(fd, "\nPending frame drop list: %zu\n",
5380 mPendingFrameDropList.size());
5381 dprintf(fd, "-------+-----------\n");
5382 dprintf(fd, " Frame | Stream ID \n");
5383 dprintf(fd, "-------+-----------\n");
5384 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5385 i != mPendingFrameDropList.end(); i++) {
5386 dprintf(fd, " %5d | %9d \n",
5387 i->frame_number, i->stream_ID);
5388 }
5389 dprintf(fd, "-------+-----------\n");
5390
5391 dprintf(fd, "\n Camera HAL3 information End \n");
5392
5393 /* use dumpsys media.camera as trigger to send update debug level event */
5394 mUpdateDebugLevel = true;
5395 pthread_mutex_unlock(&mMutex);
5396 return;
5397}
5398
5399/*===========================================================================
5400 * FUNCTION : flush
5401 *
5402 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5403 * conditionally restarts channels
5404 *
5405 * PARAMETERS :
5406 * @ restartChannels: re-start all channels
5407 *
5408 *
5409 * RETURN :
5410 * 0 on success
5411 * Error code on failure
5412 *==========================================================================*/
5413int QCamera3HardwareInterface::flush(bool restartChannels)
5414{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005415 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005416 int32_t rc = NO_ERROR;
5417
5418 LOGD("Unblocking Process Capture Request");
5419 pthread_mutex_lock(&mMutex);
5420 mFlush = true;
5421 pthread_mutex_unlock(&mMutex);
5422
5423 rc = stopAllChannels();
5424 // unlink of dualcam
5425 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005426 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5427 &m_pDualCamCmdPtr->bundle_info;
5428 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5430 pthread_mutex_lock(&gCamLock);
5431
5432 if (mIsMainCamera == 1) {
5433 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5434 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005435 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005436 // related session id should be session id of linked session
5437 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5438 } else {
5439 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5440 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005441 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005442 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5443 }
5444 pthread_mutex_unlock(&gCamLock);
5445
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005446 rc = mCameraHandle->ops->set_dual_cam_cmd(
5447 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005448 if (rc < 0) {
5449 LOGE("Dualcam: Unlink failed, but still proceed to close");
5450 }
5451 }
5452
5453 if (rc < 0) {
5454 LOGE("stopAllChannels failed");
5455 return rc;
5456 }
5457 if (mChannelHandle) {
5458 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5459 mChannelHandle);
5460 }
5461
5462 // Reset bundle info
5463 rc = setBundleInfo();
5464 if (rc < 0) {
5465 LOGE("setBundleInfo failed %d", rc);
5466 return rc;
5467 }
5468
5469 // Mutex Lock
5470 pthread_mutex_lock(&mMutex);
5471
5472 // Unblock process_capture_request
5473 mPendingLiveRequest = 0;
5474 pthread_cond_signal(&mRequestCond);
5475
5476 rc = notifyErrorForPendingRequests();
5477 if (rc < 0) {
5478 LOGE("notifyErrorForPendingRequests failed");
5479 pthread_mutex_unlock(&mMutex);
5480 return rc;
5481 }
5482
5483 mFlush = false;
5484
5485 // Start the Streams/Channels
5486 if (restartChannels) {
5487 rc = startAllChannels();
5488 if (rc < 0) {
5489 LOGE("startAllChannels failed");
5490 pthread_mutex_unlock(&mMutex);
5491 return rc;
5492 }
5493 }
5494
5495 if (mChannelHandle) {
5496 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5497 mChannelHandle);
5498 if (rc < 0) {
5499 LOGE("start_channel failed");
5500 pthread_mutex_unlock(&mMutex);
5501 return rc;
5502 }
5503 }
5504
5505 pthread_mutex_unlock(&mMutex);
5506
5507 return 0;
5508}
5509
/*===========================================================================
 * FUNCTION   : flushPerf
 *
 * DESCRIPTION: This is the performance optimization version of flush that does
 *              not use stream off, rather flushes the system
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 : success
 *              -EINVAL: input is malformed (device is not valid)
 *              -ENODEV: if the device has encountered a serious error
 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    // mMutex is held for the entire flush sequence so that no new capture
    // request can interleave with the flush; every exit path below must
    // unlock it before returning.
    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the number of buffers currently out with the backend; the
    // wait loop below blocks until all of them have been signalled back.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    // Nothing outstanding -- the flush is already complete.
    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // CLOCK_MONOTONIC is used so wall-clock adjustments cannot shorten or
    // lengthen the timed wait.
    // NOTE(review): pthread_cond_timedwait measures against the condvar's
    // clock -- assumes mBuffersCond was initialized with a CLOCK_MONOTONIC
    // condattr; confirm at the condvar's init site.
    rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // The condvar wait releases mMutex while blocked, allowing the buffer
    // callbacks to take the lock, decrement the pending count and signal.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            // Untimed fallback when the clock could not be read.
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    // rc != 0 here means the wait failed or timed out with buffers still
    // pending; report a device error to the caller.
    if (rc != 0) {
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5623
/*===========================================================================
 * FUNCTION   : handleCameraDeviceError
 *
 * DESCRIPTION: This function calls internal flush and notifies the error to
 *              framework and updates the state variable.
 *
 * PARAMETERS : None
 *
 * RETURN     : NO_ERROR on Success
 *              Error code on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    {
        // mFlushLock is held for the whole check-flush-deinit sequence --
        // presumably to serialize against a concurrent framework flush();
        // verify against the other mFlushLock users.
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        // mMutex must be released before calling flush(), which takes
        // mMutex internally.
        pthread_mutex_unlock(&mMutex);

        rc = flush(false /* restart channels */);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        // Even if the flush failed, move to DEINIT so no further requests
        // are serviced on this broken device.
        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Report an unrecoverable device error to the framework; frame_number 0
    // and a NULL stream mark this as a device-wide (not per-request) error.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
5669
5670/*===========================================================================
5671 * FUNCTION : captureResultCb
5672 *
5673 * DESCRIPTION: Callback handler for all capture result
5674 * (streams, as well as metadata)
5675 *
5676 * PARAMETERS :
5677 * @metadata : metadata information
5678 * @buffer : actual gralloc buffer to be returned to frameworks.
5679 * NULL if metadata.
5680 *
5681 * RETURN : NONE
5682 *==========================================================================*/
5683void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5684 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5685{
5686 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005687 pthread_mutex_lock(&mMutex);
5688 uint8_t batchSize = mBatchSize;
5689 pthread_mutex_unlock(&mMutex);
5690 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005691 handleBatchMetadata(metadata_buf,
5692 true /* free_and_bufdone_meta_buf */);
5693 } else { /* mBatchSize = 0 */
5694 hdrPlusPerfLock(metadata_buf);
5695 pthread_mutex_lock(&mMutex);
5696 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005697 true /* free_and_bufdone_meta_buf */,
5698 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005699 pthread_mutex_unlock(&mMutex);
5700 }
5701 } else if (isInputBuffer) {
5702 pthread_mutex_lock(&mMutex);
5703 handleInputBufferWithLock(frame_number);
5704 pthread_mutex_unlock(&mMutex);
5705 } else {
5706 pthread_mutex_lock(&mMutex);
5707 handleBufferWithLock(buffer, frame_number);
5708 pthread_mutex_unlock(&mMutex);
5709 }
5710 return;
5711}
5712
5713/*===========================================================================
5714 * FUNCTION : getReprocessibleOutputStreamId
5715 *
5716 * DESCRIPTION: Get source output stream id for the input reprocess stream
5717 * based on size and format, which would be the largest
5718 * output stream if an input stream exists.
5719 *
5720 * PARAMETERS :
5721 * @id : return the stream id if found
5722 *
5723 * RETURN : int32_t type of status
5724 * NO_ERROR -- success
5725 * none-zero failure code
5726 *==========================================================================*/
5727int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5728{
5729 /* check if any output or bidirectional stream with the same size and format
5730 and return that stream */
5731 if ((mInputStreamInfo.dim.width > 0) &&
5732 (mInputStreamInfo.dim.height > 0)) {
5733 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5734 it != mStreamInfo.end(); it++) {
5735
5736 camera3_stream_t *stream = (*it)->stream;
5737 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5738 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5739 (stream->format == mInputStreamInfo.format)) {
5740 // Usage flag for an input stream and the source output stream
5741 // may be different.
5742 LOGD("Found reprocessible output stream! %p", *it);
5743 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5744 stream->usage, mInputStreamInfo.usage);
5745
5746 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5747 if (channel != NULL && channel->mStreams[0]) {
5748 id = channel->mStreams[0]->getMyServerID();
5749 return NO_ERROR;
5750 }
5751 }
5752 }
5753 } else {
5754 LOGD("No input stream, so no reprocessible output stream");
5755 }
5756 return NAME_NOT_FOUND;
5757}
5758
5759/*===========================================================================
5760 * FUNCTION : lookupFwkName
5761 *
5762 * DESCRIPTION: In case the enum is not same in fwk and backend
5763 * make sure the parameter is correctly propogated
5764 *
5765 * PARAMETERS :
5766 * @arr : map between the two enums
5767 * @len : len of the map
5768 * @hal_name : name of the hal_parm to map
5769 *
5770 * RETURN : int type of status
5771 * fwk_name -- success
5772 * none-zero failure code
5773 *==========================================================================*/
5774template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5775 size_t len, halType hal_name)
5776{
5777
5778 for (size_t i = 0; i < len; i++) {
5779 if (arr[i].hal_name == hal_name) {
5780 return arr[i].fwk_name;
5781 }
5782 }
5783
5784 /* Not able to find matching framework type is not necessarily
5785 * an error case. This happens when mm-camera supports more attributes
5786 * than the frameworks do */
5787 LOGH("Cannot find matching framework type");
5788 return NAME_NOT_FOUND;
5789}
5790
5791/*===========================================================================
5792 * FUNCTION : lookupHalName
5793 *
5794 * DESCRIPTION: In case the enum is not same in fwk and backend
5795 * make sure the parameter is correctly propogated
5796 *
5797 * PARAMETERS :
5798 * @arr : map between the two enums
5799 * @len : len of the map
5800 * @fwk_name : name of the hal_parm to map
5801 *
5802 * RETURN : int32_t type of status
5803 * hal_name -- success
5804 * none-zero failure code
5805 *==========================================================================*/
5806template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5807 size_t len, fwkType fwk_name)
5808{
5809 for (size_t i = 0; i < len; i++) {
5810 if (arr[i].fwk_name == fwk_name) {
5811 return arr[i].hal_name;
5812 }
5813 }
5814
5815 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5816 return NAME_NOT_FOUND;
5817}
5818
5819/*===========================================================================
5820 * FUNCTION : lookupProp
5821 *
5822 * DESCRIPTION: lookup a value by its name
5823 *
5824 * PARAMETERS :
5825 * @arr : map between the two enums
5826 * @len : size of the map
5827 * @name : name to be looked up
5828 *
5829 * RETURN : Value if found
5830 * CAM_CDS_MODE_MAX if not found
5831 *==========================================================================*/
5832template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5833 size_t len, const char *name)
5834{
5835 if (name) {
5836 for (size_t i = 0; i < len; i++) {
5837 if (!strcmp(arr[i].desc, name)) {
5838 return arr[i].val;
5839 }
5840 }
5841 }
5842 return CAM_CDS_MODE_MAX;
5843}
5844
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate a metadata buffer received from the HAL backend into
 *              the camera_metadata_t format expected by the framework.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @timestamp: metadata buffer timestamp
 *   @request_id: request id
 *   @jpegMetadata: additional jpeg metadata
 *   @pipeline_depth: pipeline depth to report for this result
 *   @capture_intent: capture intent to report for this result
 *   @DevCamDebug_meta_enable: enable DevCamDebug meta
 *   @pprocDone: whether internal offline postprocessing is done
 *   @fwk_cacMode: framework chromatic aberration correction mode
 *   @firstMetadataInBatch: whether this is the first metadata in a batch
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
5861camera_metadata_t*
5862QCamera3HardwareInterface::translateFromHalMetadata(
5863 metadata_buffer_t *metadata,
5864 nsecs_t timestamp,
5865 int32_t request_id,
5866 const CameraMetadata& jpegMetadata,
5867 uint8_t pipeline_depth,
5868 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005869 /* DevCamDebug metadata translateFromHalMetadata argument */
5870 uint8_t DevCamDebug_meta_enable,
5871 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005872 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005873 uint8_t fwk_cacMode,
5874 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005875{
5876 CameraMetadata camMetadata;
5877 camera_metadata_t *resultMetadata;
5878
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005879 if (mBatchSize && !firstMetadataInBatch) {
5880 /* In batch mode, use cached metadata from the first metadata
5881 in the batch */
5882 camMetadata.clear();
5883 camMetadata = mCachedMetadata;
5884 }
5885
Thierry Strudel3d639192016-09-09 11:52:26 -07005886 if (jpegMetadata.entryCount())
5887 camMetadata.append(jpegMetadata);
5888
5889 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5890 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5891 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5892 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005893 if (mBatchSize == 0) {
5894 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5895 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5896 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005897
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005898 if (mBatchSize && !firstMetadataInBatch) {
5899 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5900 resultMetadata = camMetadata.release();
5901 return resultMetadata;
5902 }
5903
Samuel Ha68ba5172016-12-15 18:41:12 -08005904 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5905 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5906 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5907 // DevCamDebug metadata translateFromHalMetadata AF
5908 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5909 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5910 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5911 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5912 }
5913 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5914 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5915 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5916 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5917 }
5918 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5919 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5920 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5921 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5922 }
5923 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5924 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5925 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5926 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5927 }
5928 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5929 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5930 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5931 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5932 }
5933 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5934 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5935 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5936 *DevCamDebug_af_monitor_pdaf_target_pos;
5937 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5938 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5939 }
5940 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5941 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5942 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5943 *DevCamDebug_af_monitor_pdaf_confidence;
5944 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5945 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5946 }
5947 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5948 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5949 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5950 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5951 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5952 }
5953 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5954 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5955 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5956 *DevCamDebug_af_monitor_tof_target_pos;
5957 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5958 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5959 }
5960 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5961 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5962 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5963 *DevCamDebug_af_monitor_tof_confidence;
5964 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5965 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5966 }
5967 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5968 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5969 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5970 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5971 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5972 }
5973 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5974 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5975 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5976 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5977 &fwk_DevCamDebug_af_monitor_type_select, 1);
5978 }
5979 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5980 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5981 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5982 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5983 &fwk_DevCamDebug_af_monitor_refocus, 1);
5984 }
5985 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5986 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5987 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5988 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5989 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5990 }
5991 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5992 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5993 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5994 *DevCamDebug_af_search_pdaf_target_pos;
5995 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5996 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5997 }
5998 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5999 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6000 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6001 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6002 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6003 }
6004 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6005 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6006 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6007 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6008 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6009 }
6010 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6011 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6012 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6013 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6014 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6015 }
6016 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6017 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6018 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6019 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6020 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6021 }
6022 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6023 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6024 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6025 *DevCamDebug_af_search_tof_target_pos;
6026 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6027 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6028 }
6029 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6030 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6031 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6032 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6033 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6034 }
6035 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6036 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6037 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6038 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6039 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6040 }
6041 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6042 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6043 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6044 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6045 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6046 }
6047 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6048 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6049 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6050 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6051 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6052 }
6053 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6054 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6055 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6056 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6057 &fwk_DevCamDebug_af_search_type_select, 1);
6058 }
6059 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6060 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6061 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6062 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6063 &fwk_DevCamDebug_af_search_next_pos, 1);
6064 }
6065 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6066 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6067 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6068 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6069 &fwk_DevCamDebug_af_search_target_pos, 1);
6070 }
6071 // DevCamDebug metadata translateFromHalMetadata AEC
6072 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6073 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6074 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6075 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6076 }
6077 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6078 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6079 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6080 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6081 }
6082 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6083 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6084 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6085 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6086 }
6087 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6088 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6089 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6090 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6091 }
6092 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6093 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6094 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6095 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6096 }
6097 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6098 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6099 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6100 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6101 }
6102 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6103 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6104 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6105 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6106 }
6107 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6108 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6109 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6110 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6111 }
6112 // DevCamDebug metadata translateFromHalMetadata AWB
6113 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6114 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6115 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6116 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6117 }
6118 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6119 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6120 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6121 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6122 }
6123 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6124 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6125 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6126 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6127 }
6128 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6129 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6130 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6131 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6132 }
6133 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6134 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6135 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6136 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6137 }
6138 }
6139 // atrace_end(ATRACE_TAG_ALWAYS);
6140
Thierry Strudel3d639192016-09-09 11:52:26 -07006141 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6142 int64_t fwk_frame_number = *frame_number;
6143 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6144 }
6145
6146 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6147 int32_t fps_range[2];
6148 fps_range[0] = (int32_t)float_range->min_fps;
6149 fps_range[1] = (int32_t)float_range->max_fps;
6150 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6151 fps_range, 2);
6152 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6153 fps_range[0], fps_range[1]);
6154 }
6155
6156 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6157 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6158 }
6159
6160 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6161 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6162 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6163 *sceneMode);
6164 if (NAME_NOT_FOUND != val) {
6165 uint8_t fwkSceneMode = (uint8_t)val;
6166 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6167 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6168 fwkSceneMode);
6169 }
6170 }
6171
6172 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6173 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6174 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6175 }
6176
6177 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6178 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6179 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6180 }
6181
6182 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6183 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6184 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6185 }
6186
6187 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6188 CAM_INTF_META_EDGE_MODE, metadata) {
6189 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6190 }
6191
6192 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6193 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6194 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6195 }
6196
6197 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6198 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6199 }
6200
6201 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6202 if (0 <= *flashState) {
6203 uint8_t fwk_flashState = (uint8_t) *flashState;
6204 if (!gCamCapability[mCameraId]->flash_available) {
6205 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6206 }
6207 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6208 }
6209 }
6210
6211 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6212 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6213 if (NAME_NOT_FOUND != val) {
6214 uint8_t fwk_flashMode = (uint8_t)val;
6215 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6216 }
6217 }
6218
6219 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6220 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6221 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6222 }
6223
6224 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6225 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6226 }
6227
6228 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6229 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6230 }
6231
6232 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6233 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6234 }
6235
6236 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6237 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6238 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6239 }
6240
6241 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6242 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6243 LOGD("fwk_videoStab = %d", fwk_videoStab);
6244 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6245 } else {
6246 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
6247 // and so hardcoding the Video Stab result to OFF mode.
6248 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6249 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006250 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006251 }
6252
6253 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6254 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6255 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6256 }
6257
6258 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6259 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6260 }
6261
Thierry Strudel3d639192016-09-09 11:52:26 -07006262 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6263 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006264 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006265
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006266 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6267 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006268
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006269 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006270 blackLevelAppliedPattern->cam_black_level[0],
6271 blackLevelAppliedPattern->cam_black_level[1],
6272 blackLevelAppliedPattern->cam_black_level[2],
6273 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006274 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6275 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006276
6277#ifndef USE_HAL_3_3
6278 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006279 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6280 // depth space.
6281 fwk_blackLevelInd[0] /= 4.0;
6282 fwk_blackLevelInd[1] /= 4.0;
6283 fwk_blackLevelInd[2] /= 4.0;
6284 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006285 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6286 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006287#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006288 }
6289
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006290#ifndef USE_HAL_3_3
6291 // Fixed whitelevel is used by ISP/Sensor
6292 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6293 &gCamCapability[mCameraId]->white_level, 1);
6294#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006295
6296 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6297 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6298 int32_t scalerCropRegion[4];
6299 scalerCropRegion[0] = hScalerCropRegion->left;
6300 scalerCropRegion[1] = hScalerCropRegion->top;
6301 scalerCropRegion[2] = hScalerCropRegion->width;
6302 scalerCropRegion[3] = hScalerCropRegion->height;
6303
6304 // Adjust crop region from sensor output coordinate system to active
6305 // array coordinate system.
6306 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6307 scalerCropRegion[2], scalerCropRegion[3]);
6308
6309 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6310 }
6311
6312 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6313 LOGD("sensorExpTime = %lld", *sensorExpTime);
6314 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6315 }
6316
6317 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6318 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6319 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6320 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6321 }
6322
6323 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6324 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6325 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6326 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6327 sensorRollingShutterSkew, 1);
6328 }
6329
6330 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6331 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6332 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6333
6334 //calculate the noise profile based on sensitivity
6335 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6336 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6337 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6338 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6339 noise_profile[i] = noise_profile_S;
6340 noise_profile[i+1] = noise_profile_O;
6341 }
6342 LOGD("noise model entry (S, O) is (%f, %f)",
6343 noise_profile_S, noise_profile_O);
6344 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6345 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6346 }
6347
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006348#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006349 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006350 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006351 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006352 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006353 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6354 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6355 }
6356 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006357#endif
6358
Thierry Strudel3d639192016-09-09 11:52:26 -07006359 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6360 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6361 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6362 }
6363
6364 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6365 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6366 *faceDetectMode);
6367 if (NAME_NOT_FOUND != val) {
6368 uint8_t fwk_faceDetectMode = (uint8_t)val;
6369 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6370
6371 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6372 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6373 CAM_INTF_META_FACE_DETECTION, metadata) {
6374 uint8_t numFaces = MIN(
6375 faceDetectionInfo->num_faces_detected, MAX_ROI);
6376 int32_t faceIds[MAX_ROI];
6377 uint8_t faceScores[MAX_ROI];
6378 int32_t faceRectangles[MAX_ROI * 4];
6379 int32_t faceLandmarks[MAX_ROI * 6];
6380 size_t j = 0, k = 0;
6381
6382 for (size_t i = 0; i < numFaces; i++) {
6383 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6384 // Adjust crop region from sensor output coordinate system to active
6385 // array coordinate system.
6386 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6387 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6388 rect.width, rect.height);
6389
6390 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6391 faceRectangles+j, -1);
6392
6393 j+= 4;
6394 }
6395 if (numFaces <= 0) {
6396 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6397 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6398 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6399 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6400 }
6401
6402 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6403 numFaces);
6404 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6405 faceRectangles, numFaces * 4U);
6406 if (fwk_faceDetectMode ==
6407 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6408 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6409 CAM_INTF_META_FACE_LANDMARK, metadata) {
6410
6411 for (size_t i = 0; i < numFaces; i++) {
6412 // Map the co-ordinate sensor output coordinate system to active
6413 // array coordinate system.
6414 mCropRegionMapper.toActiveArray(
6415 landmarks->face_landmarks[i].left_eye_center.x,
6416 landmarks->face_landmarks[i].left_eye_center.y);
6417 mCropRegionMapper.toActiveArray(
6418 landmarks->face_landmarks[i].right_eye_center.x,
6419 landmarks->face_landmarks[i].right_eye_center.y);
6420 mCropRegionMapper.toActiveArray(
6421 landmarks->face_landmarks[i].mouth_center.x,
6422 landmarks->face_landmarks[i].mouth_center.y);
6423
6424 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006425 k+= TOTAL_LANDMARK_INDICES;
6426 }
6427 } else {
6428 for (size_t i = 0; i < numFaces; i++) {
6429 setInvalidLandmarks(faceLandmarks+k);
6430 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006431 }
6432 }
6433
6434 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6435 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6436 faceLandmarks, numFaces * 6U);
6437 }
6438 }
6439 }
6440 }
6441 }
6442
6443 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6444 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6445 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006446
6447 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6448 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6449 // process histogram statistics info
6450 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6451 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6452 cam_histogram_data_t rHistData, gHistData, bHistData;
6453 memset(&rHistData, 0, sizeof(rHistData));
6454 memset(&gHistData, 0, sizeof(gHistData));
6455 memset(&bHistData, 0, sizeof(bHistData));
6456
6457 switch (stats_data->type) {
6458 case CAM_HISTOGRAM_TYPE_BAYER:
6459 switch (stats_data->bayer_stats.data_type) {
6460 case CAM_STATS_CHANNEL_GR:
6461 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6462 break;
6463 case CAM_STATS_CHANNEL_GB:
6464 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6465 break;
6466 case CAM_STATS_CHANNEL_B:
6467 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6468 break;
6469 case CAM_STATS_CHANNEL_ALL:
6470 rHistData = stats_data->bayer_stats.r_stats;
6471 //Framework expects only 3 channels. So, for now,
6472 //use gb stats for G channel.
6473 gHistData = stats_data->bayer_stats.gb_stats;
6474 bHistData = stats_data->bayer_stats.b_stats;
6475 break;
6476 case CAM_STATS_CHANNEL_Y:
6477 case CAM_STATS_CHANNEL_R:
6478 default:
6479 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6480 break;
6481 }
6482 break;
6483 case CAM_HISTOGRAM_TYPE_YUV:
6484 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6485 break;
6486 }
6487
6488 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6489 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6490 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6491
6492 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6493 }
6494 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006495 }
6496
6497 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6498 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6499 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6500 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6501 }
6502
6503 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6504 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6505 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6506 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6507 }
6508
6509 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6510 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6511 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6512 CAM_MAX_SHADING_MAP_HEIGHT);
6513 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6514 CAM_MAX_SHADING_MAP_WIDTH);
6515 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6516 lensShadingMap->lens_shading, 4U * map_width * map_height);
6517 }
6518
6519 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6520 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6521 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6522 }
6523
6524 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6525 //Populate CAM_INTF_META_TONEMAP_CURVES
6526 /* ch0 = G, ch 1 = B, ch 2 = R*/
6527 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6528 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6529 tonemap->tonemap_points_cnt,
6530 CAM_MAX_TONEMAP_CURVE_SIZE);
6531 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6532 }
6533
6534 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6535 &tonemap->curves[0].tonemap_points[0][0],
6536 tonemap->tonemap_points_cnt * 2);
6537
6538 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6539 &tonemap->curves[1].tonemap_points[0][0],
6540 tonemap->tonemap_points_cnt * 2);
6541
6542 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6543 &tonemap->curves[2].tonemap_points[0][0],
6544 tonemap->tonemap_points_cnt * 2);
6545 }
6546
6547 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6548 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6549 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6550 CC_GAIN_MAX);
6551 }
6552
6553 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6554 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6555 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6556 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6557 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6558 }
6559
6560 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6561 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6562 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6563 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6564 toneCurve->tonemap_points_cnt,
6565 CAM_MAX_TONEMAP_CURVE_SIZE);
6566 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6567 }
6568 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6569 (float*)toneCurve->curve.tonemap_points,
6570 toneCurve->tonemap_points_cnt * 2);
6571 }
6572
6573 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6574 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6575 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6576 predColorCorrectionGains->gains, 4);
6577 }
6578
6579 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6580 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6581 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6582 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6583 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6584 }
6585
6586 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6587 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6588 }
6589
6590 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6591 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6592 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6593 }
6594
6595 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6596 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6597 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6598 }
6599
6600 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6601 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6602 *effectMode);
6603 if (NAME_NOT_FOUND != val) {
6604 uint8_t fwk_effectMode = (uint8_t)val;
6605 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6606 }
6607 }
6608
6609 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6610 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6611 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6612 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6613 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6614 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6615 }
6616 int32_t fwk_testPatternData[4];
6617 fwk_testPatternData[0] = testPatternData->r;
6618 fwk_testPatternData[3] = testPatternData->b;
6619 switch (gCamCapability[mCameraId]->color_arrangement) {
6620 case CAM_FILTER_ARRANGEMENT_RGGB:
6621 case CAM_FILTER_ARRANGEMENT_GRBG:
6622 fwk_testPatternData[1] = testPatternData->gr;
6623 fwk_testPatternData[2] = testPatternData->gb;
6624 break;
6625 case CAM_FILTER_ARRANGEMENT_GBRG:
6626 case CAM_FILTER_ARRANGEMENT_BGGR:
6627 fwk_testPatternData[2] = testPatternData->gr;
6628 fwk_testPatternData[1] = testPatternData->gb;
6629 break;
6630 default:
6631 LOGE("color arrangement %d is not supported",
6632 gCamCapability[mCameraId]->color_arrangement);
6633 break;
6634 }
6635 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6636 }
6637
6638 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6639 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6640 }
6641
6642 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6643 String8 str((const char *)gps_methods);
6644 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6645 }
6646
6647 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6648 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6649 }
6650
6651 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6652 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6653 }
6654
6655 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6656 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6657 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6658 }
6659
6660 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6661 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6662 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6663 }
6664
6665 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6666 int32_t fwk_thumb_size[2];
6667 fwk_thumb_size[0] = thumb_size->width;
6668 fwk_thumb_size[1] = thumb_size->height;
6669 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6670 }
6671
6672 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6673 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6674 privateData,
6675 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6676 }
6677
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006678 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6679 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6680 meteringMode, 1);
6681 }
6682
Thierry Strudel3d639192016-09-09 11:52:26 -07006683 if (metadata->is_tuning_params_valid) {
6684 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6685 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6686 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6687
6688
6689 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6690 sizeof(uint32_t));
6691 data += sizeof(uint32_t);
6692
6693 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6694 sizeof(uint32_t));
6695 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6696 data += sizeof(uint32_t);
6697
6698 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6699 sizeof(uint32_t));
6700 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6701 data += sizeof(uint32_t);
6702
6703 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6704 sizeof(uint32_t));
6705 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6706 data += sizeof(uint32_t);
6707
6708 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6709 sizeof(uint32_t));
6710 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6711 data += sizeof(uint32_t);
6712
6713 metadata->tuning_params.tuning_mod3_data_size = 0;
6714 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6715 sizeof(uint32_t));
6716 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6717 data += sizeof(uint32_t);
6718
6719 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6720 TUNING_SENSOR_DATA_MAX);
6721 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6722 count);
6723 data += count;
6724
6725 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6726 TUNING_VFE_DATA_MAX);
6727 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6728 count);
6729 data += count;
6730
6731 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6732 TUNING_CPP_DATA_MAX);
6733 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6734 count);
6735 data += count;
6736
6737 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6738 TUNING_CAC_DATA_MAX);
6739 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6740 count);
6741 data += count;
6742
6743 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6744 (int32_t *)(void *)tuning_meta_data_blob,
6745 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6746 }
6747
6748 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6749 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6750 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6751 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6752 NEUTRAL_COL_POINTS);
6753 }
6754
6755 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6756 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6757 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6758 }
6759
6760 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6761 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6762 // Adjust crop region from sensor output coordinate system to active
6763 // array coordinate system.
6764 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6765 hAeRegions->rect.width, hAeRegions->rect.height);
6766
6767 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6768 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6769 REGIONS_TUPLE_COUNT);
6770 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6771 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6772 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6773 hAeRegions->rect.height);
6774 }
6775
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006776 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6777 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6778 if (NAME_NOT_FOUND != val) {
6779 uint8_t fwkAfMode = (uint8_t)val;
6780 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6781 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6782 } else {
6783 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6784 val);
6785 }
6786 }
6787
Thierry Strudel3d639192016-09-09 11:52:26 -07006788 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6789 uint8_t fwk_afState = (uint8_t) *afState;
6790 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006791 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006792 }
6793
6794 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6795 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6796 }
6797
6798 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6799 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6800 }
6801
6802 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6803 uint8_t fwk_lensState = *lensState;
6804 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6805 }
6806
6807 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6808 /*af regions*/
6809 int32_t afRegions[REGIONS_TUPLE_COUNT];
6810 // Adjust crop region from sensor output coordinate system to active
6811 // array coordinate system.
6812 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6813 hAfRegions->rect.width, hAfRegions->rect.height);
6814
6815 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6816 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6817 REGIONS_TUPLE_COUNT);
6818 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6819 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6820 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6821 hAfRegions->rect.height);
6822 }
6823
6824 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006825 uint32_t ab_mode = *hal_ab_mode;
6826 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6827 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6828 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006830 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006831 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006832 if (NAME_NOT_FOUND != val) {
6833 uint8_t fwk_ab_mode = (uint8_t)val;
6834 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6835 }
6836 }
6837
6838 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6839 int val = lookupFwkName(SCENE_MODES_MAP,
6840 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6841 if (NAME_NOT_FOUND != val) {
6842 uint8_t fwkBestshotMode = (uint8_t)val;
6843 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6844 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6845 } else {
6846 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6847 }
6848 }
6849
6850 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6851 uint8_t fwk_mode = (uint8_t) *mode;
6852 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6853 }
6854
6855 /* Constant metadata values to be update*/
6856 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6857 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6858
6859 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6860 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6861
6862 int32_t hotPixelMap[2];
6863 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6864
6865 // CDS
6866 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6867 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6868 }
6869
Thierry Strudel04e026f2016-10-10 11:27:36 -07006870 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6871 int32_t fwk_hdr;
6872 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6873 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6874 } else {
6875 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6876 }
6877 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6878 }
6879
6880 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006881 int32_t fwk_ir = (int32_t) *ir;
6882 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006883 }
6884
Thierry Strudel269c81a2016-10-12 12:13:59 -07006885 // AEC SPEED
6886 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6887 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6888 }
6889
6890 // AWB SPEED
6891 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6892 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6893 }
6894
Thierry Strudel3d639192016-09-09 11:52:26 -07006895 // TNR
6896 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6897 uint8_t tnr_enable = tnr->denoise_enable;
6898 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6899
6900 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6901 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6902 }
6903
6904 // Reprocess crop data
6905 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6906 uint8_t cnt = crop_data->num_of_streams;
6907 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6908 // mm-qcamera-daemon only posts crop_data for streams
6909 // not linked to pproc. So no valid crop metadata is not
6910 // necessarily an error case.
6911 LOGD("No valid crop metadata entries");
6912 } else {
6913 uint32_t reproc_stream_id;
6914 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6915 LOGD("No reprocessible stream found, ignore crop data");
6916 } else {
6917 int rc = NO_ERROR;
6918 Vector<int32_t> roi_map;
6919 int32_t *crop = new int32_t[cnt*4];
6920 if (NULL == crop) {
6921 rc = NO_MEMORY;
6922 }
6923 if (NO_ERROR == rc) {
6924 int32_t streams_found = 0;
6925 for (size_t i = 0; i < cnt; i++) {
6926 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6927 if (pprocDone) {
6928 // HAL already does internal reprocessing,
6929 // either via reprocessing before JPEG encoding,
6930 // or offline postprocessing for pproc bypass case.
6931 crop[0] = 0;
6932 crop[1] = 0;
6933 crop[2] = mInputStreamInfo.dim.width;
6934 crop[3] = mInputStreamInfo.dim.height;
6935 } else {
6936 crop[0] = crop_data->crop_info[i].crop.left;
6937 crop[1] = crop_data->crop_info[i].crop.top;
6938 crop[2] = crop_data->crop_info[i].crop.width;
6939 crop[3] = crop_data->crop_info[i].crop.height;
6940 }
6941 roi_map.add(crop_data->crop_info[i].roi_map.left);
6942 roi_map.add(crop_data->crop_info[i].roi_map.top);
6943 roi_map.add(crop_data->crop_info[i].roi_map.width);
6944 roi_map.add(crop_data->crop_info[i].roi_map.height);
6945 streams_found++;
6946 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6947 crop[0], crop[1], crop[2], crop[3]);
6948 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6949 crop_data->crop_info[i].roi_map.left,
6950 crop_data->crop_info[i].roi_map.top,
6951 crop_data->crop_info[i].roi_map.width,
6952 crop_data->crop_info[i].roi_map.height);
6953 break;
6954
6955 }
6956 }
6957 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6958 &streams_found, 1);
6959 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6960 crop, (size_t)(streams_found * 4));
6961 if (roi_map.array()) {
6962 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6963 roi_map.array(), roi_map.size());
6964 }
6965 }
6966 if (crop) {
6967 delete [] crop;
6968 }
6969 }
6970 }
6971 }
6972
6973 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6974 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6975 // so hardcoding the CAC result to OFF mode.
6976 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6977 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6978 } else {
6979 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6980 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6981 *cacMode);
6982 if (NAME_NOT_FOUND != val) {
6983 uint8_t resultCacMode = (uint8_t)val;
6984 // check whether CAC result from CB is equal to Framework set CAC mode
6985 // If not equal then set the CAC mode came in corresponding request
6986 if (fwk_cacMode != resultCacMode) {
6987 resultCacMode = fwk_cacMode;
6988 }
6989 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6990 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6991 } else {
6992 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6993 }
6994 }
6995 }
6996
6997 // Post blob of cam_cds_data through vendor tag.
6998 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6999 uint8_t cnt = cdsInfo->num_of_streams;
7000 cam_cds_data_t cdsDataOverride;
7001 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7002 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7003 cdsDataOverride.num_of_streams = 1;
7004 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7005 uint32_t reproc_stream_id;
7006 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7007 LOGD("No reprocessible stream found, ignore cds data");
7008 } else {
7009 for (size_t i = 0; i < cnt; i++) {
7010 if (cdsInfo->cds_info[i].stream_id ==
7011 reproc_stream_id) {
7012 cdsDataOverride.cds_info[0].cds_enable =
7013 cdsInfo->cds_info[i].cds_enable;
7014 break;
7015 }
7016 }
7017 }
7018 } else {
7019 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7020 }
7021 camMetadata.update(QCAMERA3_CDS_INFO,
7022 (uint8_t *)&cdsDataOverride,
7023 sizeof(cam_cds_data_t));
7024 }
7025
7026 // Ldaf calibration data
7027 if (!mLdafCalibExist) {
7028 IF_META_AVAILABLE(uint32_t, ldafCalib,
7029 CAM_INTF_META_LDAF_EXIF, metadata) {
7030 mLdafCalibExist = true;
7031 mLdafCalib[0] = ldafCalib[0];
7032 mLdafCalib[1] = ldafCalib[1];
7033 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7034 ldafCalib[0], ldafCalib[1]);
7035 }
7036 }
7037
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007038 // Reprocess and DDM debug data through vendor tag
7039 cam_reprocess_info_t repro_info;
7040 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007041 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7042 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007043 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007044 }
7045 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7046 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007047 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007048 }
7049 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7050 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007051 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007052 }
7053 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7054 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007055 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007056 }
7057 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7058 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007059 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007060 }
7061 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007062 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007063 }
7064 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7065 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007066 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007067 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007068 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7069 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7070 }
7071 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7072 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7073 }
7074 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7075 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007076
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007077 // INSTANT AEC MODE
7078 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7079 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7080 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7081 }
7082
Shuzhen Wange763e802016-03-31 10:24:29 -07007083 // AF scene change
7084 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7085 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7086 }
7087
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007088 /* In batch mode, cache the first metadata in the batch */
7089 if (mBatchSize && firstMetadataInBatch) {
7090 mCachedMetadata.clear();
7091 mCachedMetadata = camMetadata;
7092 }
7093
Thierry Strudel3d639192016-09-09 11:52:26 -07007094 resultMetadata = camMetadata.release();
7095 return resultMetadata;
7096}
7097
7098/*===========================================================================
7099 * FUNCTION : saveExifParams
7100 *
7101 * DESCRIPTION:
7102 *
7103 * PARAMETERS :
7104 * @metadata : metadata information from callback
7105 *
7106 * RETURN : none
7107 *
7108 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Cache per-module 3A/stats EXIF debug blobs from the backend metadata
    // into mExifParams so they can be embedded into the JPEG EXIF later.
    // mExifParams.debug_params may be NULL when debug EXIF is disabled, so
    // every section checks it before copying and sets the matching _valid flag.

    // AE (auto-exposure) debug data
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto-white-balance) debug data
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto-focus) debug data
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto-scene-detection) debug data
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // Generic stats buffer debug data
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // BE (Bayer exposure) stats debug data
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Bayer histogram debug data
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug data
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
7168
7169/*===========================================================================
7170 * FUNCTION : get3AExifParams
7171 *
7172 * DESCRIPTION:
7173 *
7174 * PARAMETERS : none
7175 *
7176 *
7177 * RETURN : mm_jpeg_exif_params_t
7178 *
7179 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns a copy of the cached 3A EXIF parameters most recently stored
    // by saveExifParams(). Note: the copy shares the debug_params pointer
    // with mExifParams (shallow copy of that member).
    return mExifParams;
}
7184
7185/*===========================================================================
7186 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7187 *
7188 * DESCRIPTION:
7189 *
7190 * PARAMETERS :
7191 * @metadata : metadata information from callback
7192 *
7193 * RETURN : camera_metadata_t*
7194 * metadata in a format specified by fwk
7195 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (converged/searching/locked...) -> framework enum (uint8_t)
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger echoed back with its trigger id
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/flash-required...)
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger echoed back with its trigger id
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // HAL white-balance mode -> framework AWB mode via lookup table
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three HAL fields. Priority order
    // matters: red-eye reduction first, then flash mode, then plain AE mode.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        // Red-eye reduction enabled implies ON_AUTO_FLASH_REDEYE regardless
        // of the reported flash/AE mode.
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the fields were posted by the backend; AE_MODE is omitted
        // from this result.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant-AEC: skip display frames until AEC settles or the configured
    // frame bound is reached, then arm the reset for the next request cycle.
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    // Transfer ownership of the assembled metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7306
7307/*===========================================================================
7308 * FUNCTION : dumpMetadataToFile
7309 *
7310 * DESCRIPTION: Dumps tuning metadata to file system
7311 *
7312 * PARAMETERS :
7313 * @meta : tuning metadata
7314 * @dumpFrameCount : current dump frame count
7315 * @enabled : Enable mask
7316 *
7317 *==========================================================================*/
7318void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7319 uint32_t &dumpFrameCount,
7320 bool enabled,
7321 const char *type,
7322 uint32_t frameNumber)
7323{
7324 //Some sanity checks
7325 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7326 LOGE("Tuning sensor data size bigger than expected %d: %d",
7327 meta.tuning_sensor_data_size,
7328 TUNING_SENSOR_DATA_MAX);
7329 return;
7330 }
7331
7332 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7333 LOGE("Tuning VFE data size bigger than expected %d: %d",
7334 meta.tuning_vfe_data_size,
7335 TUNING_VFE_DATA_MAX);
7336 return;
7337 }
7338
7339 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7340 LOGE("Tuning CPP data size bigger than expected %d: %d",
7341 meta.tuning_cpp_data_size,
7342 TUNING_CPP_DATA_MAX);
7343 return;
7344 }
7345
7346 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7347 LOGE("Tuning CAC data size bigger than expected %d: %d",
7348 meta.tuning_cac_data_size,
7349 TUNING_CAC_DATA_MAX);
7350 return;
7351 }
7352 //
7353
7354 if(enabled){
7355 char timeBuf[FILENAME_MAX];
7356 char buf[FILENAME_MAX];
7357 memset(buf, 0, sizeof(buf));
7358 memset(timeBuf, 0, sizeof(timeBuf));
7359 time_t current_time;
7360 struct tm * timeinfo;
7361 time (&current_time);
7362 timeinfo = localtime (&current_time);
7363 if (timeinfo != NULL) {
7364 strftime (timeBuf, sizeof(timeBuf),
7365 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7366 }
7367 String8 filePath(timeBuf);
7368 snprintf(buf,
7369 sizeof(buf),
7370 "%dm_%s_%d.bin",
7371 dumpFrameCount,
7372 type,
7373 frameNumber);
7374 filePath.append(buf);
7375 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7376 if (file_fd >= 0) {
7377 ssize_t written_len = 0;
7378 meta.tuning_data_version = TUNING_DATA_VERSION;
7379 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7380 written_len += write(file_fd, data, sizeof(uint32_t));
7381 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7382 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7383 written_len += write(file_fd, data, sizeof(uint32_t));
7384 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7385 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7386 written_len += write(file_fd, data, sizeof(uint32_t));
7387 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7388 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7389 written_len += write(file_fd, data, sizeof(uint32_t));
7390 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7391 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7392 written_len += write(file_fd, data, sizeof(uint32_t));
7393 meta.tuning_mod3_data_size = 0;
7394 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7395 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7396 written_len += write(file_fd, data, sizeof(uint32_t));
7397 size_t total_size = meta.tuning_sensor_data_size;
7398 data = (void *)((uint8_t *)&meta.data);
7399 written_len += write(file_fd, data, total_size);
7400 total_size = meta.tuning_vfe_data_size;
7401 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7402 written_len += write(file_fd, data, total_size);
7403 total_size = meta.tuning_cpp_data_size;
7404 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7405 written_len += write(file_fd, data, total_size);
7406 total_size = meta.tuning_cac_data_size;
7407 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7408 written_len += write(file_fd, data, total_size);
7409 close(file_fd);
7410 }else {
7411 LOGE("fail to open file for metadata dumping");
7412 }
7413 }
7414}
7415
7416/*===========================================================================
7417 * FUNCTION : cleanAndSortStreamInfo
7418 *
7419 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7420 * and sort them such that raw stream is at the end of the list
7421 * This is a workaround for camera daemon constraint.
7422 *
7423 * PARAMETERS : None
7424 *
7425 *==========================================================================*/
7426void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7427{
7428 List<stream_info_t *> newStreamInfo;
7429
7430 /*clean up invalid streams*/
7431 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7432 it != mStreamInfo.end();) {
7433 if(((*it)->status) == INVALID){
7434 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7435 delete channel;
7436 free(*it);
7437 it = mStreamInfo.erase(it);
7438 } else {
7439 it++;
7440 }
7441 }
7442
7443 // Move preview/video/callback/snapshot streams into newList
7444 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7445 it != mStreamInfo.end();) {
7446 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7447 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7448 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7449 newStreamInfo.push_back(*it);
7450 it = mStreamInfo.erase(it);
7451 } else
7452 it++;
7453 }
7454 // Move raw streams into newList
7455 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7456 it != mStreamInfo.end();) {
7457 newStreamInfo.push_back(*it);
7458 it = mStreamInfo.erase(it);
7459 }
7460
7461 mStreamInfo = newStreamInfo;
7462}
7463
7464/*===========================================================================
7465 * FUNCTION : extractJpegMetadata
7466 *
7467 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7468 * JPEG metadata is cached in HAL, and return as part of capture
7469 * result when metadata is returned from camera daemon.
7470 *
7471 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7472 * @request: capture request
7473 *
7474 *==========================================================================*/
7475void QCamera3HardwareInterface::extractJpegMetadata(
7476 CameraMetadata& jpegMetadata,
7477 const camera3_capture_request_t *request)
7478{
7479 CameraMetadata frame_settings;
7480 frame_settings = request->settings;
7481
7482 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7483 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7484 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7485 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7486
7487 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7488 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7489 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7490 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7491
7492 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7493 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7494 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7495 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7496
7497 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7498 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7499 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7500 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7501
7502 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7503 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7504 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7505 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7506
7507 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7508 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7509 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7510 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7511
7512 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7513 int32_t thumbnail_size[2];
7514 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7515 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7516 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7517 int32_t orientation =
7518 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007519 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007520 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7521 int32_t temp;
7522 temp = thumbnail_size[0];
7523 thumbnail_size[0] = thumbnail_size[1];
7524 thumbnail_size[1] = temp;
7525 }
7526 }
7527 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7528 thumbnail_size,
7529 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7530 }
7531
7532}
7533
7534/*===========================================================================
7535 * FUNCTION : convertToRegions
7536 *
7537 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7538 *
7539 * PARAMETERS :
7540 * @rect : cam_rect_t struct to convert
7541 * @region : int32_t destination array
7542 * @weight : if we are converting from cam_area_t, weight is valid
7543 * else weight = -1
7544 *
7545 *==========================================================================*/
7546void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7547 int32_t *region, int weight)
7548{
7549 region[0] = rect.left;
7550 region[1] = rect.top;
7551 region[2] = rect.left + rect.width;
7552 region[3] = rect.top + rect.height;
7553 if (weight > -1) {
7554 region[4] = weight;
7555 }
7556}
7557
7558/*===========================================================================
7559 * FUNCTION : convertFromRegions
7560 *
7561 * DESCRIPTION: helper method to convert from array to cam_rect_t
7562 *
7563 * PARAMETERS :
7564 * @rect : cam_rect_t struct to convert
7565 * @region : int32_t destination array
7566 * @weight : if we are converting from cam_area_t, weight is valid
7567 * else weight = -1
7568 *
7569 *==========================================================================*/
7570void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08007571 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07007572{
Thierry Strudel3d639192016-09-09 11:52:26 -07007573 int32_t x_min = frame_settings.find(tag).data.i32[0];
7574 int32_t y_min = frame_settings.find(tag).data.i32[1];
7575 int32_t x_max = frame_settings.find(tag).data.i32[2];
7576 int32_t y_max = frame_settings.find(tag).data.i32[3];
7577 roi.weight = frame_settings.find(tag).data.i32[4];
7578 roi.rect.left = x_min;
7579 roi.rect.top = y_min;
7580 roi.rect.width = x_max - x_min;
7581 roi.rect.height = y_max - y_min;
7582}
7583
7584/*===========================================================================
7585 * FUNCTION : resetIfNeededROI
7586 *
7587 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7588 * crop region
7589 *
7590 * PARAMETERS :
7591 * @roi : cam_area_t struct to resize
7592 * @scalerCropRegion : cam_crop_region_t region to compare against
7593 *
7594 *
7595 *==========================================================================*/
7596bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7597 const cam_crop_region_t* scalerCropRegion)
7598{
7599 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7600 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7601 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7602 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7603
7604 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7605 * without having this check the calculations below to validate if the roi
7606 * is inside scalar crop region will fail resulting in the roi not being
7607 * reset causing algorithm to continue to use stale roi window
7608 */
7609 if (roi->weight == 0) {
7610 return true;
7611 }
7612
7613 if ((roi_x_max < scalerCropRegion->left) ||
7614 // right edge of roi window is left of scalar crop's left edge
7615 (roi_y_max < scalerCropRegion->top) ||
7616 // bottom edge of roi window is above scalar crop's top edge
7617 (roi->rect.left > crop_x_max) ||
7618 // left edge of roi window is beyond(right) of scalar crop's right edge
7619 (roi->rect.top > crop_y_max)){
7620 // top edge of roi windo is above scalar crop's top edge
7621 return false;
7622 }
7623 if (roi->rect.left < scalerCropRegion->left) {
7624 roi->rect.left = scalerCropRegion->left;
7625 }
7626 if (roi->rect.top < scalerCropRegion->top) {
7627 roi->rect.top = scalerCropRegion->top;
7628 }
7629 if (roi_x_max > crop_x_max) {
7630 roi_x_max = crop_x_max;
7631 }
7632 if (roi_y_max > crop_y_max) {
7633 roi_y_max = crop_y_max;
7634 }
7635 roi->rect.width = roi_x_max - roi->rect.left;
7636 roi->rect.height = roi_y_max - roi->rect.top;
7637 return true;
7638}
7639
7640/*===========================================================================
7641 * FUNCTION : convertLandmarks
7642 *
7643 * DESCRIPTION: helper method to extract the landmarks from face detection info
7644 *
7645 * PARAMETERS :
7646 * @landmark_data : input landmark data to be converted
7647 * @landmarks : int32_t destination array
7648 *
7649 *
7650 *==========================================================================*/
7651void QCamera3HardwareInterface::convertLandmarks(
7652 cam_face_landmarks_info_t landmark_data,
7653 int32_t *landmarks)
7654{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007655 if (landmark_data.is_left_eye_valid) {
7656 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7657 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7658 } else {
7659 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7660 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7661 }
7662
7663 if (landmark_data.is_right_eye_valid) {
7664 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7665 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7666 } else {
7667 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7668 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7669 }
7670
7671 if (landmark_data.is_mouth_valid) {
7672 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7673 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7674 } else {
7675 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7676 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7677 }
7678}
7679
7680/*===========================================================================
7681 * FUNCTION : setInvalidLandmarks
7682 *
7683 * DESCRIPTION: helper method to set invalid landmarks
7684 *
7685 * PARAMETERS :
7686 * @landmarks : int32_t destination array
7687 *
7688 *
7689 *==========================================================================*/
7690void QCamera3HardwareInterface::setInvalidLandmarks(
7691 int32_t *landmarks)
7692{
7693 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7694 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7695 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7696 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7697 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7698 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007699}
7700
7701#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007702
7703/*===========================================================================
7704 * FUNCTION : getCapabilities
7705 *
7706 * DESCRIPTION: query camera capability from back-end
7707 *
7708 * PARAMETERS :
7709 * @ops : mm-interface ops structure
7710 * @cam_handle : camera handle for which we need capability
7711 *
7712 * RETURN : ptr type of capability structure
7713 * capability for success
7714 * NULL for failure
7715 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    // Queries the backend for the camera capability blob via a shared heap
    // buffer and returns a heap-allocated (malloc'd) copy; caller owns and
    // must free() it. Returns NULL on any failure. Cleanup uses a goto
    // ladder so each failure path unwinds only what was set up.
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    // Share the heap buffer with the daemon so query_capability() can fill it.
    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    // Snapshot the daemon-filled buffer into the caller-owned copy before
    // the shared heap is unmapped/freed below.
    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Zero out analysis padding offsets in the returned copy.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

// Success also falls through here: the shared heap is always unmapped,
// deallocated, and deleted; only rc decides what is returned.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
7791
Thierry Strudel3d639192016-09-09 11:52:26 -07007792/*===========================================================================
7793 * FUNCTION : initCapabilities
7794 *
7795 * DESCRIPTION: initialize camera capabilities in static data struct
7796 *
7797 * PARAMETERS :
7798 * @cameraId : camera Id
7799 *
7800 * RETURN : int32_t type of status
7801 * NO_ERROR -- success
7802 * none-zero failure code
7803 *==========================================================================*/
7804int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7805{
7806 int rc = 0;
7807 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007808 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007809
7810 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7811 if (rc) {
7812 LOGE("camera_open failed. rc = %d", rc);
7813 goto open_failed;
7814 }
7815 if (!cameraHandle) {
7816 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7817 goto open_failed;
7818 }
7819
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007820 handle = get_main_camera_handle(cameraHandle->camera_handle);
7821 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7822 if (gCamCapability[cameraId] == NULL) {
7823 rc = FAILED_TRANSACTION;
7824 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007825 }
7826
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007827 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007828 if (is_dual_camera_by_idx(cameraId)) {
7829 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7830 gCamCapability[cameraId]->aux_cam_cap =
7831 getCapabilities(cameraHandle->ops, handle);
7832 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7833 rc = FAILED_TRANSACTION;
7834 free(gCamCapability[cameraId]);
7835 goto failed_op;
7836 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007837
7838 // Copy the main camera capability to main_cam_cap struct
7839 gCamCapability[cameraId]->main_cam_cap =
7840 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7841 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7842 LOGE("out of memory");
7843 rc = NO_MEMORY;
7844 goto failed_op;
7845 }
7846 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7847 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007848 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007849failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007850 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7851 cameraHandle = NULL;
7852open_failed:
7853 return rc;
7854}
7855
7856/*==========================================================================
7857 * FUNCTION : get3Aversion
7858 *
7859 * DESCRIPTION: get the Q3A S/W version
7860 *
7861 * PARAMETERS :
7862 * @sw_version: Reference of Q3A structure which will hold version info upon
7863 * return
7864 *
7865 * RETURN : None
7866 *
7867 *==========================================================================*/
7868void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7869{
7870 if(gCamCapability[mCameraId])
7871 sw_version = gCamCapability[mCameraId]->q3a_version;
7872 else
7873 LOGE("Capability structure NULL!");
7874}
7875
7876
7877/*===========================================================================
7878 * FUNCTION : initParameters
7879 *
7880 * DESCRIPTION: initialize camera parameters
7881 *
7882 * PARAMETERS :
7883 *
7884 * RETURN : int32_t type of status
7885 * NO_ERROR -- success
7886 * none-zero failure code
7887 *==========================================================================*/
7888int QCamera3HardwareInterface::initParameters()
7889{
7890 int rc = 0;
7891
7892 //Allocate Set Param Buffer
7893 mParamHeap = new QCamera3HeapMemory(1);
7894 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7895 if(rc != OK) {
7896 rc = NO_MEMORY;
7897 LOGE("Failed to allocate SETPARM Heap memory");
7898 delete mParamHeap;
7899 mParamHeap = NULL;
7900 return rc;
7901 }
7902
7903 //Map memory for parameters buffer
7904 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7905 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7906 mParamHeap->getFd(0),
7907 sizeof(metadata_buffer_t),
7908 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7909 if(rc < 0) {
7910 LOGE("failed to map SETPARM buffer");
7911 rc = FAILED_TRANSACTION;
7912 mParamHeap->deallocate();
7913 delete mParamHeap;
7914 mParamHeap = NULL;
7915 return rc;
7916 }
7917
7918 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7919
7920 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7921 return rc;
7922}
7923
7924/*===========================================================================
7925 * FUNCTION : deinitParameters
7926 *
7927 * DESCRIPTION: de-initialize camera parameters
7928 *
7929 * PARAMETERS :
7930 *
7931 * RETURN : NONE
7932 *==========================================================================*/
7933void QCamera3HardwareInterface::deinitParameters()
7934{
7935 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
7936 CAM_MAPPING_BUF_TYPE_PARM_BUF);
7937
7938 mParamHeap->deallocate();
7939 delete mParamHeap;
7940 mParamHeap = NULL;
7941
7942 mParameters = NULL;
7943
7944 free(mPrevParameters);
7945 mPrevParameters = NULL;
7946}
7947
7948/*===========================================================================
7949 * FUNCTION : calcMaxJpegSize
7950 *
7951 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7952 *
7953 * PARAMETERS :
7954 *
7955 * RETURN : max_jpeg_size
7956 *==========================================================================*/
7957size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7958{
7959 size_t max_jpeg_size = 0;
7960 size_t temp_width, temp_height;
7961 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7962 MAX_SIZES_CNT);
7963 for (size_t i = 0; i < count; i++) {
7964 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7965 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7966 if (temp_width * temp_height > max_jpeg_size ) {
7967 max_jpeg_size = temp_width * temp_height;
7968 }
7969 }
7970 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7971 return max_jpeg_size;
7972}
7973
7974/*===========================================================================
7975 * FUNCTION : getMaxRawSize
7976 *
7977 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7978 *
7979 * PARAMETERS :
7980 *
7981 * RETURN : Largest supported Raw Dimension
7982 *==========================================================================*/
7983cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7984{
7985 int max_width = 0;
7986 cam_dimension_t maxRawSize;
7987
7988 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7989 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7990 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7991 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7992 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7993 }
7994 }
7995 return maxRawSize;
7996}
7997
7998
7999/*===========================================================================
8000 * FUNCTION : calcMaxJpegDim
8001 *
8002 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8003 *
8004 * PARAMETERS :
8005 *
8006 * RETURN : max_jpeg_dim
8007 *==========================================================================*/
8008cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8009{
8010 cam_dimension_t max_jpeg_dim;
8011 cam_dimension_t curr_jpeg_dim;
8012 max_jpeg_dim.width = 0;
8013 max_jpeg_dim.height = 0;
8014 curr_jpeg_dim.width = 0;
8015 curr_jpeg_dim.height = 0;
8016 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8017 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8018 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8019 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8020 max_jpeg_dim.width * max_jpeg_dim.height ) {
8021 max_jpeg_dim.width = curr_jpeg_dim.width;
8022 max_jpeg_dim.height = curr_jpeg_dim.height;
8023 }
8024 }
8025 return max_jpeg_dim;
8026}
8027
8028/*===========================================================================
8029 * FUNCTION : addStreamConfig
8030 *
8031 * DESCRIPTION: adds the stream configuration to the array
8032 *
8033 * PARAMETERS :
8034 * @available_stream_configs : pointer to stream configuration array
8035 * @scalar_format : scalar format
8036 * @dim : configuration dimension
8037 * @config_type : input or output configuration type
8038 *
8039 * RETURN : NONE
8040 *==========================================================================*/
8041void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8042 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8043{
8044 available_stream_configs.add(scalar_format);
8045 available_stream_configs.add(dim.width);
8046 available_stream_configs.add(dim.height);
8047 available_stream_configs.add(config_type);
8048}
8049
8050/*===========================================================================
8051 * FUNCTION : suppportBurstCapture
8052 *
8053 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8054 *
8055 * PARAMETERS :
8056 * @cameraId : camera Id
8057 *
8058 * RETURN : true if camera supports BURST_CAPTURE
8059 * false otherwise
8060 *==========================================================================*/
8061bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8062{
8063 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8064 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8065 const int32_t highResWidth = 3264;
8066 const int32_t highResHeight = 2448;
8067
8068 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8069 // Maximum resolution images cannot be captured at >= 10fps
8070 // -> not supporting BURST_CAPTURE
8071 return false;
8072 }
8073
8074 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8075 // Maximum resolution images can be captured at >= 20fps
8076 // --> supporting BURST_CAPTURE
8077 return true;
8078 }
8079
8080 // Find the smallest highRes resolution, or largest resolution if there is none
8081 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8082 MAX_SIZES_CNT);
8083 size_t highRes = 0;
8084 while ((highRes + 1 < totalCnt) &&
8085 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8086 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8087 highResWidth * highResHeight)) {
8088 highRes++;
8089 }
8090 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8091 return true;
8092 } else {
8093 return false;
8094 }
8095}
8096
8097/*===========================================================================
8098 * FUNCTION : initStaticMetadata
8099 *
8100 * DESCRIPTION: initialize the static metadata
8101 *
8102 * PARAMETERS :
8103 * @cameraId : camera Id
8104 *
8105 * RETURN : int32_t type of status
8106 * 0 -- success
8107 * non-zero failure code
8108 *==========================================================================*/
8109int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8110{
8111 int rc = 0;
8112 CameraMetadata staticInfo;
8113 size_t count = 0;
8114 bool limitedDevice = false;
8115 char prop[PROPERTY_VALUE_MAX];
8116 bool supportBurst = false;
8117
8118 supportBurst = supportBurstCapture(cameraId);
8119
8120 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8121 * guaranteed or if min fps of max resolution is less than 20 fps, its
8122 * advertised as limited device*/
8123 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8124 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8125 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8126 !supportBurst;
8127
8128 uint8_t supportedHwLvl = limitedDevice ?
8129 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008130#ifndef USE_HAL_3_3
8131 // LEVEL_3 - This device will support level 3.
8132 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8133#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008134 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008135#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008136
8137 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8138 &supportedHwLvl, 1);
8139
8140 bool facingBack = false;
8141 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8142 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8143 facingBack = true;
8144 }
8145 /*HAL 3 only*/
8146 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8147 &gCamCapability[cameraId]->min_focus_distance, 1);
8148
8149 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8150 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8151
8152 /*should be using focal lengths but sensor doesn't provide that info now*/
8153 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8154 &gCamCapability[cameraId]->focal_length,
8155 1);
8156
8157 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8158 gCamCapability[cameraId]->apertures,
8159 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8160
8161 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8162 gCamCapability[cameraId]->filter_densities,
8163 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8164
8165
8166 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8167 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8168 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8169
8170 int32_t lens_shading_map_size[] = {
8171 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8172 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8173 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8174 lens_shading_map_size,
8175 sizeof(lens_shading_map_size)/sizeof(int32_t));
8176
8177 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8178 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8179
8180 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8181 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8182
8183 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8184 &gCamCapability[cameraId]->max_frame_duration, 1);
8185
8186 camera_metadata_rational baseGainFactor = {
8187 gCamCapability[cameraId]->base_gain_factor.numerator,
8188 gCamCapability[cameraId]->base_gain_factor.denominator};
8189 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8190 &baseGainFactor, 1);
8191
8192 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8193 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8194
8195 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8196 gCamCapability[cameraId]->pixel_array_size.height};
8197 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8198 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8199
8200 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8201 gCamCapability[cameraId]->active_array_size.top,
8202 gCamCapability[cameraId]->active_array_size.width,
8203 gCamCapability[cameraId]->active_array_size.height};
8204 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8205 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8206
8207 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8208 &gCamCapability[cameraId]->white_level, 1);
8209
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008210 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8211 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8212 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008213 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008214 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008215
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008216#ifndef USE_HAL_3_3
8217 bool hasBlackRegions = false;
8218 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8219 LOGW("black_region_count: %d is bounded to %d",
8220 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8221 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8222 }
8223 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8224 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8225 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8226 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8227 }
8228 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8229 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8230 hasBlackRegions = true;
8231 }
8232#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008233 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8234 &gCamCapability[cameraId]->flash_charge_duration, 1);
8235
8236 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8237 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8238
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008239 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8240 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8241 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008242 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8243 &timestampSource, 1);
8244
8245 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8246 &gCamCapability[cameraId]->histogram_size, 1);
8247
8248 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8249 &gCamCapability[cameraId]->max_histogram_count, 1);
8250
8251 int32_t sharpness_map_size[] = {
8252 gCamCapability[cameraId]->sharpness_map_size.width,
8253 gCamCapability[cameraId]->sharpness_map_size.height};
8254
8255 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8256 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8257
8258 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8259 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8260
8261 int32_t scalar_formats[] = {
8262 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8263 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8264 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8265 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8266 HAL_PIXEL_FORMAT_RAW10,
8267 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8268 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8269 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8270 scalar_formats,
8271 scalar_formats_count);
8272
8273 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8274 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8275 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8276 count, MAX_SIZES_CNT, available_processed_sizes);
8277 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8278 available_processed_sizes, count * 2);
8279
8280 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8281 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8282 makeTable(gCamCapability[cameraId]->raw_dim,
8283 count, MAX_SIZES_CNT, available_raw_sizes);
8284 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8285 available_raw_sizes, count * 2);
8286
8287 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8288 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8289 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8290 count, MAX_SIZES_CNT, available_fps_ranges);
8291 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8292 available_fps_ranges, count * 2);
8293
8294 camera_metadata_rational exposureCompensationStep = {
8295 gCamCapability[cameraId]->exp_compensation_step.numerator,
8296 gCamCapability[cameraId]->exp_compensation_step.denominator};
8297 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8298 &exposureCompensationStep, 1);
8299
8300 Vector<uint8_t> availableVstabModes;
8301 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8302 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008303 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008304 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008305 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008306 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008307 count = IS_TYPE_MAX;
8308 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8309 for (size_t i = 0; i < count; i++) {
8310 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8311 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8312 eisSupported = true;
8313 break;
8314 }
8315 }
8316 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008317 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8318 }
8319 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8320 availableVstabModes.array(), availableVstabModes.size());
8321
8322 /*HAL 1 and HAL 3 common*/
8323 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8324 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8325 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8326 float maxZoom = maxZoomStep/minZoomStep;
8327 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8328 &maxZoom, 1);
8329
8330 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8331 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8332
8333 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8334 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8335 max3aRegions[2] = 0; /* AF not supported */
8336 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8337 max3aRegions, 3);
8338
8339 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8340 memset(prop, 0, sizeof(prop));
8341 property_get("persist.camera.facedetect", prop, "1");
8342 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8343 LOGD("Support face detection mode: %d",
8344 supportedFaceDetectMode);
8345
8346 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008347 /* support mode should be OFF if max number of face is 0 */
8348 if (maxFaces <= 0) {
8349 supportedFaceDetectMode = 0;
8350 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008351 Vector<uint8_t> availableFaceDetectModes;
8352 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8353 if (supportedFaceDetectMode == 1) {
8354 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8355 } else if (supportedFaceDetectMode == 2) {
8356 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8357 } else if (supportedFaceDetectMode == 3) {
8358 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8359 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8360 } else {
8361 maxFaces = 0;
8362 }
8363 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8364 availableFaceDetectModes.array(),
8365 availableFaceDetectModes.size());
8366 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8367 (int32_t *)&maxFaces, 1);
8368
8369 int32_t exposureCompensationRange[] = {
8370 gCamCapability[cameraId]->exposure_compensation_min,
8371 gCamCapability[cameraId]->exposure_compensation_max};
8372 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8373 exposureCompensationRange,
8374 sizeof(exposureCompensationRange)/sizeof(int32_t));
8375
8376 uint8_t lensFacing = (facingBack) ?
8377 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8378 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8379
8380 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8381 available_thumbnail_sizes,
8382 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8383
8384 /*all sizes will be clubbed into this tag*/
8385 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8386 /*android.scaler.availableStreamConfigurations*/
8387 Vector<int32_t> available_stream_configs;
8388 cam_dimension_t active_array_dim;
8389 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8390 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8391 /* Add input/output stream configurations for each scalar formats*/
8392 for (size_t j = 0; j < scalar_formats_count; j++) {
8393 switch (scalar_formats[j]) {
8394 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8395 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8396 case HAL_PIXEL_FORMAT_RAW10:
8397 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8398 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8399 addStreamConfig(available_stream_configs, scalar_formats[j],
8400 gCamCapability[cameraId]->raw_dim[i],
8401 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8402 }
8403 break;
8404 case HAL_PIXEL_FORMAT_BLOB:
8405 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8406 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8407 addStreamConfig(available_stream_configs, scalar_formats[j],
8408 gCamCapability[cameraId]->picture_sizes_tbl[i],
8409 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8410 }
8411 break;
8412 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8413 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8414 default:
8415 cam_dimension_t largest_picture_size;
8416 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8417 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8418 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8419 addStreamConfig(available_stream_configs, scalar_formats[j],
8420 gCamCapability[cameraId]->picture_sizes_tbl[i],
8421 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8422 /* Book keep largest */
8423 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8424 >= largest_picture_size.width &&
8425 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8426 >= largest_picture_size.height)
8427 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8428 }
8429 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8430 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8431 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8432 addStreamConfig(available_stream_configs, scalar_formats[j],
8433 largest_picture_size,
8434 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8435 }
8436 break;
8437 }
8438 }
8439
8440 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8441 available_stream_configs.array(), available_stream_configs.size());
8442 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8443 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8444
8445 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8446 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8447
8448 /* android.scaler.availableMinFrameDurations */
8449 Vector<int64_t> available_min_durations;
8450 for (size_t j = 0; j < scalar_formats_count; j++) {
8451 switch (scalar_formats[j]) {
8452 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8453 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8454 case HAL_PIXEL_FORMAT_RAW10:
8455 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8456 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8457 available_min_durations.add(scalar_formats[j]);
8458 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8459 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8460 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8461 }
8462 break;
8463 default:
8464 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8465 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8466 available_min_durations.add(scalar_formats[j]);
8467 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8468 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8469 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8470 }
8471 break;
8472 }
8473 }
8474 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8475 available_min_durations.array(), available_min_durations.size());
8476
8477 Vector<int32_t> available_hfr_configs;
8478 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8479 int32_t fps = 0;
8480 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8481 case CAM_HFR_MODE_60FPS:
8482 fps = 60;
8483 break;
8484 case CAM_HFR_MODE_90FPS:
8485 fps = 90;
8486 break;
8487 case CAM_HFR_MODE_120FPS:
8488 fps = 120;
8489 break;
8490 case CAM_HFR_MODE_150FPS:
8491 fps = 150;
8492 break;
8493 case CAM_HFR_MODE_180FPS:
8494 fps = 180;
8495 break;
8496 case CAM_HFR_MODE_210FPS:
8497 fps = 210;
8498 break;
8499 case CAM_HFR_MODE_240FPS:
8500 fps = 240;
8501 break;
8502 case CAM_HFR_MODE_480FPS:
8503 fps = 480;
8504 break;
8505 case CAM_HFR_MODE_OFF:
8506 case CAM_HFR_MODE_MAX:
8507 default:
8508 break;
8509 }
8510
8511 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8512 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8513 /* For each HFR frame rate, need to advertise one variable fps range
8514 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8515 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8516 * set by the app. When video recording is started, [120, 120] is
8517 * set. This way sensor configuration does not change when recording
8518 * is started */
8519
8520 /* (width, height, fps_min, fps_max, batch_size_max) */
8521 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8522 j < MAX_SIZES_CNT; j++) {
8523 available_hfr_configs.add(
8524 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8525 available_hfr_configs.add(
8526 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8527 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8528 available_hfr_configs.add(fps);
8529 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8530
8531 /* (width, height, fps_min, fps_max, batch_size_max) */
8532 available_hfr_configs.add(
8533 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8534 available_hfr_configs.add(
8535 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8536 available_hfr_configs.add(fps);
8537 available_hfr_configs.add(fps);
8538 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8539 }
8540 }
8541 }
8542 //Advertise HFR capability only if the property is set
8543 memset(prop, 0, sizeof(prop));
8544 property_get("persist.camera.hal3hfr.enable", prop, "1");
8545 uint8_t hfrEnable = (uint8_t)atoi(prop);
8546
8547 if(hfrEnable && available_hfr_configs.array()) {
8548 staticInfo.update(
8549 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8550 available_hfr_configs.array(), available_hfr_configs.size());
8551 }
8552
8553 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8554 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8555 &max_jpeg_size, 1);
8556
8557 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8558 size_t size = 0;
8559 count = CAM_EFFECT_MODE_MAX;
8560 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8561 for (size_t i = 0; i < count; i++) {
8562 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8563 gCamCapability[cameraId]->supported_effects[i]);
8564 if (NAME_NOT_FOUND != val) {
8565 avail_effects[size] = (uint8_t)val;
8566 size++;
8567 }
8568 }
8569 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8570 avail_effects,
8571 size);
8572
8573 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8574 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8575 size_t supported_scene_modes_cnt = 0;
8576 count = CAM_SCENE_MODE_MAX;
8577 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8578 for (size_t i = 0; i < count; i++) {
8579 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8580 CAM_SCENE_MODE_OFF) {
8581 int val = lookupFwkName(SCENE_MODES_MAP,
8582 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8583 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08008584
Thierry Strudel3d639192016-09-09 11:52:26 -07008585 if (NAME_NOT_FOUND != val) {
8586 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8587 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8588 supported_scene_modes_cnt++;
8589 }
8590 }
8591 }
8592 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8593 avail_scene_modes,
8594 supported_scene_modes_cnt);
8595
8596 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8597 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8598 supported_scene_modes_cnt,
8599 CAM_SCENE_MODE_MAX,
8600 scene_mode_overrides,
8601 supported_indexes,
8602 cameraId);
8603
8604 if (supported_scene_modes_cnt == 0) {
8605 supported_scene_modes_cnt = 1;
8606 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8607 }
8608
8609 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8610 scene_mode_overrides, supported_scene_modes_cnt * 3);
8611
8612 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8613 ANDROID_CONTROL_MODE_AUTO,
8614 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8615 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8616 available_control_modes,
8617 3);
8618
8619 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8620 size = 0;
8621 count = CAM_ANTIBANDING_MODE_MAX;
8622 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8623 for (size_t i = 0; i < count; i++) {
8624 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8625 gCamCapability[cameraId]->supported_antibandings[i]);
8626 if (NAME_NOT_FOUND != val) {
8627 avail_antibanding_modes[size] = (uint8_t)val;
8628 size++;
8629 }
8630
8631 }
8632 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8633 avail_antibanding_modes,
8634 size);
8635
8636 uint8_t avail_abberation_modes[] = {
8637 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8638 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8639 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8640 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8641 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8642 if (0 == count) {
8643 // If no aberration correction modes are available for a device, this advertise OFF mode
8644 size = 1;
8645 } else {
8646 // If count is not zero then atleast one among the FAST or HIGH quality is supported
8647 // So, advertize all 3 modes if atleast any one mode is supported as per the
8648 // new M requirement
8649 size = 3;
8650 }
8651 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8652 avail_abberation_modes,
8653 size);
8654
8655 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8656 size = 0;
8657 count = CAM_FOCUS_MODE_MAX;
8658 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8659 for (size_t i = 0; i < count; i++) {
8660 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8661 gCamCapability[cameraId]->supported_focus_modes[i]);
8662 if (NAME_NOT_FOUND != val) {
8663 avail_af_modes[size] = (uint8_t)val;
8664 size++;
8665 }
8666 }
8667 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8668 avail_af_modes,
8669 size);
8670
8671 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8672 size = 0;
8673 count = CAM_WB_MODE_MAX;
8674 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8675 for (size_t i = 0; i < count; i++) {
8676 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8677 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8678 gCamCapability[cameraId]->supported_white_balances[i]);
8679 if (NAME_NOT_FOUND != val) {
8680 avail_awb_modes[size] = (uint8_t)val;
8681 size++;
8682 }
8683 }
8684 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8685 avail_awb_modes,
8686 size);
8687
8688 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8689 count = CAM_FLASH_FIRING_LEVEL_MAX;
8690 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8691 count);
8692 for (size_t i = 0; i < count; i++) {
8693 available_flash_levels[i] =
8694 gCamCapability[cameraId]->supported_firing_levels[i];
8695 }
8696 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8697 available_flash_levels, count);
8698
8699 uint8_t flashAvailable;
8700 if (gCamCapability[cameraId]->flash_available)
8701 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8702 else
8703 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8704 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8705 &flashAvailable, 1);
8706
8707 Vector<uint8_t> avail_ae_modes;
8708 count = CAM_AE_MODE_MAX;
8709 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8710 for (size_t i = 0; i < count; i++) {
8711 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8712 }
8713 if (flashAvailable) {
8714 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8715 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8716 }
8717 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8718 avail_ae_modes.array(),
8719 avail_ae_modes.size());
8720
8721 int32_t sensitivity_range[2];
8722 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8723 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8724 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8725 sensitivity_range,
8726 sizeof(sensitivity_range) / sizeof(int32_t));
8727
8728 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8729 &gCamCapability[cameraId]->max_analog_sensitivity,
8730 1);
8731
8732 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8733 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8734 &sensor_orientation,
8735 1);
8736
8737 int32_t max_output_streams[] = {
8738 MAX_STALLING_STREAMS,
8739 MAX_PROCESSED_STREAMS,
8740 MAX_RAW_STREAMS};
8741 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8742 max_output_streams,
8743 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8744
8745 uint8_t avail_leds = 0;
8746 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8747 &avail_leds, 0);
8748
8749 uint8_t focus_dist_calibrated;
8750 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8751 gCamCapability[cameraId]->focus_dist_calibrated);
8752 if (NAME_NOT_FOUND != val) {
8753 focus_dist_calibrated = (uint8_t)val;
8754 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8755 &focus_dist_calibrated, 1);
8756 }
8757
8758 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8759 size = 0;
8760 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8761 MAX_TEST_PATTERN_CNT);
8762 for (size_t i = 0; i < count; i++) {
8763 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8764 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8765 if (NAME_NOT_FOUND != testpatternMode) {
8766 avail_testpattern_modes[size] = testpatternMode;
8767 size++;
8768 }
8769 }
8770 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8771 avail_testpattern_modes,
8772 size);
8773
8774 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8775 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8776 &max_pipeline_depth,
8777 1);
8778
8779 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8780 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8781 &partial_result_count,
8782 1);
8783
8784 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8785 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8786
8787 Vector<uint8_t> available_capabilities;
8788 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8789 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8790 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8791 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8792 if (supportBurst) {
8793 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8794 }
8795 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8796 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8797 if (hfrEnable && available_hfr_configs.array()) {
8798 available_capabilities.add(
8799 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8800 }
8801
8802 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8803 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8804 }
8805 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8806 available_capabilities.array(),
8807 available_capabilities.size());
8808
8809 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8810 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8811 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8812 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8813
8814 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8815 &aeLockAvailable, 1);
8816
8817 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8818 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8819 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8820 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8821
8822 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8823 &awbLockAvailable, 1);
8824
8825 int32_t max_input_streams = 1;
8826 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8827 &max_input_streams,
8828 1);
8829
8830 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8831 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8832 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8833 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8834 HAL_PIXEL_FORMAT_YCbCr_420_888};
8835 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8836 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8837
8838 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8839 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8840 &max_latency,
8841 1);
8842
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008843#ifndef USE_HAL_3_3
8844 int32_t isp_sensitivity_range[2];
8845 isp_sensitivity_range[0] =
8846 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8847 isp_sensitivity_range[1] =
8848 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8849 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8850 isp_sensitivity_range,
8851 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8852#endif
8853
Thierry Strudel3d639192016-09-09 11:52:26 -07008854 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8855 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8856 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8857 available_hot_pixel_modes,
8858 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8859
8860 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8861 ANDROID_SHADING_MODE_FAST,
8862 ANDROID_SHADING_MODE_HIGH_QUALITY};
8863 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8864 available_shading_modes,
8865 3);
8866
8867 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8868 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8869 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8870 available_lens_shading_map_modes,
8871 2);
8872
8873 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8874 ANDROID_EDGE_MODE_FAST,
8875 ANDROID_EDGE_MODE_HIGH_QUALITY,
8876 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8877 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8878 available_edge_modes,
8879 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8880
8881 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8882 ANDROID_NOISE_REDUCTION_MODE_FAST,
8883 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8884 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8885 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8886 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8887 available_noise_red_modes,
8888 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8889
8890 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8891 ANDROID_TONEMAP_MODE_FAST,
8892 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8893 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8894 available_tonemap_modes,
8895 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8896
8897 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8898 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8899 available_hot_pixel_map_modes,
8900 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8901
8902 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8903 gCamCapability[cameraId]->reference_illuminant1);
8904 if (NAME_NOT_FOUND != val) {
8905 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8906 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8907 }
8908
8909 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8910 gCamCapability[cameraId]->reference_illuminant2);
8911 if (NAME_NOT_FOUND != val) {
8912 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8913 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8914 }
8915
8916 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8917 (void *)gCamCapability[cameraId]->forward_matrix1,
8918 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8919
8920 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8921 (void *)gCamCapability[cameraId]->forward_matrix2,
8922 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8923
8924 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8925 (void *)gCamCapability[cameraId]->color_transform1,
8926 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8927
8928 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8929 (void *)gCamCapability[cameraId]->color_transform2,
8930 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8931
8932 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8933 (void *)gCamCapability[cameraId]->calibration_transform1,
8934 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8935
8936 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8937 (void *)gCamCapability[cameraId]->calibration_transform2,
8938 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8939
8940 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8941 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8942 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8943 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8944 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8945 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8946 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8947 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8948 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8949 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8950 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8951 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8952 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8953 ANDROID_JPEG_GPS_COORDINATES,
8954 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8955 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8956 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8957 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8958 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8959 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8960 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8961 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8962 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8963 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008964#ifndef USE_HAL_3_3
8965 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8966#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008967 ANDROID_STATISTICS_FACE_DETECT_MODE,
8968 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8969 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8970 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008971 ANDROID_BLACK_LEVEL_LOCK,
8972 /* DevCamDebug metadata request_keys_basic */
8973 DEVCAMDEBUG_META_ENABLE,
8974 /* DevCamDebug metadata end */
8975 };
Thierry Strudel3d639192016-09-09 11:52:26 -07008976
8977 size_t request_keys_cnt =
8978 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8979 Vector<int32_t> available_request_keys;
8980 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8981 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8982 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8983 }
8984
8985 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8986 available_request_keys.array(), available_request_keys.size());
8987
8988 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8989 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8990 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8991 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8992 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8993 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8994 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8995 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8996 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8997 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8998 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8999 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9000 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9001 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9002 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9003 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9004 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
9005 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
9006 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9007 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9008 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009009 ANDROID_STATISTICS_FACE_SCORES,
9010#ifndef USE_HAL_3_3
9011 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9012#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07009013 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009014 // DevCamDebug metadata result_keys_basic
9015 DEVCAMDEBUG_META_ENABLE,
9016 // DevCamDebug metadata result_keys AF
9017 DEVCAMDEBUG_AF_LENS_POSITION,
9018 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9019 DEVCAMDEBUG_AF_TOF_DISTANCE,
9020 DEVCAMDEBUG_AF_LUMA,
9021 DEVCAMDEBUG_AF_HAF_STATE,
9022 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9023 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9024 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9025 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9026 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9027 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9028 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9029 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9030 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9031 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9032 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9033 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9034 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9035 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9036 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9037 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9038 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9039 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9040 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9041 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9042 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9043 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9044 // DevCamDebug metadata result_keys AEC
9045 DEVCAMDEBUG_AEC_TARGET_LUMA,
9046 DEVCAMDEBUG_AEC_COMP_LUMA,
9047 DEVCAMDEBUG_AEC_AVG_LUMA,
9048 DEVCAMDEBUG_AEC_CUR_LUMA,
9049 DEVCAMDEBUG_AEC_LINECOUNT,
9050 DEVCAMDEBUG_AEC_REAL_GAIN,
9051 DEVCAMDEBUG_AEC_EXP_INDEX,
9052 DEVCAMDEBUG_AEC_LUX_IDX,
9053 // DevCamDebug metadata result_keys AWB
9054 DEVCAMDEBUG_AWB_R_GAIN,
9055 DEVCAMDEBUG_AWB_G_GAIN,
9056 DEVCAMDEBUG_AWB_B_GAIN,
9057 DEVCAMDEBUG_AWB_CCT,
9058 DEVCAMDEBUG_AWB_DECISION,
9059 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009060 };
9061
Thierry Strudel3d639192016-09-09 11:52:26 -07009062 size_t result_keys_cnt =
9063 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9064
9065 Vector<int32_t> available_result_keys;
9066 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9067 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9068 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9069 }
9070 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9071 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9072 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9073 }
9074 if (supportedFaceDetectMode == 1) {
9075 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9076 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9077 } else if ((supportedFaceDetectMode == 2) ||
9078 (supportedFaceDetectMode == 3)) {
9079 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9080 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9081 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009082#ifndef USE_HAL_3_3
9083 if (hasBlackRegions) {
9084 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9085 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9086 }
9087#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009088 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9089 available_result_keys.array(), available_result_keys.size());
9090
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009091 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07009092 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9093 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9094 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9095 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9096 ANDROID_SCALER_CROPPING_TYPE,
9097 ANDROID_SYNC_MAX_LATENCY,
9098 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9099 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9100 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9101 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9102 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9103 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9104 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9105 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9106 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9107 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9108 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9109 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9110 ANDROID_LENS_FACING,
9111 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9112 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9113 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9114 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9115 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9116 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9117 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9118 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9119 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9120 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9121 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9122 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9123 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9124 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9125 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9126 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9127 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9128 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9129 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9130 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9131 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
9132 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
9133 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9134 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9135 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9136 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9137 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9138 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9139 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9140 ANDROID_CONTROL_AVAILABLE_MODES,
9141 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9142 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9143 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9144 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009145 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9146#ifndef USE_HAL_3_3
9147 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9148 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9149#endif
9150 };
9151
9152 Vector<int32_t> available_characteristics_keys;
9153 available_characteristics_keys.appendArray(characteristics_keys_basic,
9154 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9155#ifndef USE_HAL_3_3
9156 if (hasBlackRegions) {
9157 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9158 }
9159#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009160 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009161 available_characteristics_keys.array(),
9162 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009163
9164 /*available stall durations depend on the hw + sw and will be different for different devices */
9165 /*have to add for raw after implementation*/
9166 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9167 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9168
9169 Vector<int64_t> available_stall_durations;
9170 for (uint32_t j = 0; j < stall_formats_count; j++) {
9171 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9172 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9173 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9174 available_stall_durations.add(stall_formats[j]);
9175 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9176 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9177 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9178 }
9179 } else {
9180 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9181 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9182 available_stall_durations.add(stall_formats[j]);
9183 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9184 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9185 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9186 }
9187 }
9188 }
9189 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9190 available_stall_durations.array(),
9191 available_stall_durations.size());
9192
9193 //QCAMERA3_OPAQUE_RAW
9194 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9195 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9196 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9197 case LEGACY_RAW:
9198 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9199 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9200 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9201 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9202 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9203 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9204 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9205 break;
9206 case MIPI_RAW:
9207 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9208 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9209 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9210 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9211 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9212 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9213 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9214 break;
9215 default:
9216 LOGE("unknown opaque_raw_format %d",
9217 gCamCapability[cameraId]->opaque_raw_fmt);
9218 break;
9219 }
9220 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9221
9222 Vector<int32_t> strides;
9223 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9224 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9225 cam_stream_buf_plane_info_t buf_planes;
9226 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9227 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9228 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9229 &gCamCapability[cameraId]->padding_info, &buf_planes);
9230 strides.add(buf_planes.plane_info.mp[0].stride);
9231 }
9232 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9233 strides.size());
9234
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009235 //TBD: remove the following line once backend advertises zzHDR in feature mask
9236 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009237 //Video HDR default
9238 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9239 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009240 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -07009241 int32_t vhdr_mode[] = {
9242 QCAMERA3_VIDEO_HDR_MODE_OFF,
9243 QCAMERA3_VIDEO_HDR_MODE_ON};
9244
9245 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9246 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9247 vhdr_mode, vhdr_mode_count);
9248 }
9249
Thierry Strudel3d639192016-09-09 11:52:26 -07009250 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9251 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9252 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9253
9254 uint8_t isMonoOnly =
9255 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9256 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9257 &isMonoOnly, 1);
9258
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009259#ifndef USE_HAL_3_3
9260 Vector<int32_t> opaque_size;
9261 for (size_t j = 0; j < scalar_formats_count; j++) {
9262 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9263 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9264 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9265 cam_stream_buf_plane_info_t buf_planes;
9266
9267 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9268 &gCamCapability[cameraId]->padding_info, &buf_planes);
9269
9270 if (rc == 0) {
9271 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9272 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9273 opaque_size.add(buf_planes.plane_info.frame_len);
9274 }else {
9275 LOGE("raw frame calculation failed!");
9276 }
9277 }
9278 }
9279 }
9280
9281 if ((opaque_size.size() > 0) &&
9282 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9283 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9284 else
9285 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9286#endif
9287
Thierry Strudel04e026f2016-10-10 11:27:36 -07009288 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9289 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9290 size = 0;
9291 count = CAM_IR_MODE_MAX;
9292 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9293 for (size_t i = 0; i < count; i++) {
9294 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9295 gCamCapability[cameraId]->supported_ir_modes[i]);
9296 if (NAME_NOT_FOUND != val) {
9297 avail_ir_modes[size] = (int32_t)val;
9298 size++;
9299 }
9300 }
9301 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9302 avail_ir_modes, size);
9303 }
9304
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009305 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9306 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9307 size = 0;
9308 count = CAM_AEC_CONVERGENCE_MAX;
9309 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9310 for (size_t i = 0; i < count; i++) {
9311 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9312 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9313 if (NAME_NOT_FOUND != val) {
9314 available_instant_aec_modes[size] = (int32_t)val;
9315 size++;
9316 }
9317 }
9318 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9319 available_instant_aec_modes, size);
9320 }
9321
Thierry Strudel3d639192016-09-09 11:52:26 -07009322 gStaticMetadata[cameraId] = staticInfo.release();
9323 return rc;
9324}
9325
9326/*===========================================================================
9327 * FUNCTION : makeTable
9328 *
9329 * DESCRIPTION: make a table of sizes
9330 *
9331 * PARAMETERS :
9332 *
9333 *
9334 *==========================================================================*/
9335void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9336 size_t max_size, int32_t *sizeTable)
9337{
9338 size_t j = 0;
9339 if (size > max_size) {
9340 size = max_size;
9341 }
9342 for (size_t i = 0; i < size; i++) {
9343 sizeTable[j] = dimTable[i].width;
9344 sizeTable[j+1] = dimTable[i].height;
9345 j+=2;
9346 }
9347}
9348
9349/*===========================================================================
9350 * FUNCTION : makeFPSTable
9351 *
9352 * DESCRIPTION: make a table of fps ranges
9353 *
9354 * PARAMETERS :
9355 *
9356 *==========================================================================*/
9357void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9358 size_t max_size, int32_t *fpsRangesTable)
9359{
9360 size_t j = 0;
9361 if (size > max_size) {
9362 size = max_size;
9363 }
9364 for (size_t i = 0; i < size; i++) {
9365 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9366 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9367 j+=2;
9368 }
9369}
9370
9371/*===========================================================================
9372 * FUNCTION : makeOverridesList
9373 *
9374 * DESCRIPTION: make a list of scene mode overrides
9375 *
9376 * PARAMETERS :
9377 *
9378 *
9379 *==========================================================================*/
9380void QCamera3HardwareInterface::makeOverridesList(
9381 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9382 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9383{
9384 /*daemon will give a list of overrides for all scene modes.
9385 However we should send the fwk only the overrides for the scene modes
9386 supported by the framework*/
9387 size_t j = 0;
9388 if (size > max_size) {
9389 size = max_size;
9390 }
9391 size_t focus_count = CAM_FOCUS_MODE_MAX;
9392 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9393 focus_count);
9394 for (size_t i = 0; i < size; i++) {
9395 bool supt = false;
9396 size_t index = supported_indexes[i];
9397 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9398 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9399 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9400 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9401 overridesTable[index].awb_mode);
9402 if (NAME_NOT_FOUND != val) {
9403 overridesList[j+1] = (uint8_t)val;
9404 }
9405 uint8_t focus_override = overridesTable[index].af_mode;
9406 for (size_t k = 0; k < focus_count; k++) {
9407 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9408 supt = true;
9409 break;
9410 }
9411 }
9412 if (supt) {
9413 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9414 focus_override);
9415 if (NAME_NOT_FOUND != val) {
9416 overridesList[j+2] = (uint8_t)val;
9417 }
9418 } else {
9419 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9420 }
9421 j+=3;
9422 }
9423}
9424
9425/*===========================================================================
9426 * FUNCTION : filterJpegSizes
9427 *
9428 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9429 * could be downscaled to
9430 *
9431 * PARAMETERS :
9432 *
9433 * RETURN : length of jpegSizes array
9434 *==========================================================================*/
9435
9436size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9437 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9438 uint8_t downscale_factor)
9439{
9440 if (0 == downscale_factor) {
9441 downscale_factor = 1;
9442 }
9443
9444 int32_t min_width = active_array_size.width / downscale_factor;
9445 int32_t min_height = active_array_size.height / downscale_factor;
9446 size_t jpegSizesCnt = 0;
9447 if (processedSizesCnt > maxCount) {
9448 processedSizesCnt = maxCount;
9449 }
9450 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9451 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9452 jpegSizes[jpegSizesCnt] = processedSizes[i];
9453 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9454 jpegSizesCnt += 2;
9455 }
9456 }
9457 return jpegSizesCnt;
9458}
9459
9460/*===========================================================================
9461 * FUNCTION : computeNoiseModelEntryS
9462 *
9463 * DESCRIPTION: function to map a given sensitivity to the S noise
9464 * model parameters in the DNG noise model.
9465 *
9466 * PARAMETERS : sens : the sensor sensitivity
9467 *
9468 * RETURN : S (sensor amplification) noise
9469 *
9470 *==========================================================================*/
9471double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9472 double s = gCamCapability[mCameraId]->gradient_S * sens +
9473 gCamCapability[mCameraId]->offset_S;
9474 return ((s < 0.0) ? 0.0 : s);
9475}
9476
9477/*===========================================================================
9478 * FUNCTION : computeNoiseModelEntryO
9479 *
9480 * DESCRIPTION: function to map a given sensitivity to the O noise
9481 * model parameters in the DNG noise model.
9482 *
9483 * PARAMETERS : sens : the sensor sensitivity
9484 *
9485 * RETURN : O (sensor readout) noise
9486 *
9487 *==========================================================================*/
9488double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9489 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9490 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9491 1.0 : (1.0 * sens / max_analog_sens);
9492 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9493 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9494 return ((o < 0.0) ? 0.0 : o);
9495}
9496
9497/*===========================================================================
9498 * FUNCTION : getSensorSensitivity
9499 *
9500 * DESCRIPTION: convert iso_mode to an integer value
9501 *
9502 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9503 *
9504 * RETURN : sensitivity supported by sensor
9505 *
9506 *==========================================================================*/
9507int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9508{
9509 int32_t sensitivity;
9510
9511 switch (iso_mode) {
9512 case CAM_ISO_MODE_100:
9513 sensitivity = 100;
9514 break;
9515 case CAM_ISO_MODE_200:
9516 sensitivity = 200;
9517 break;
9518 case CAM_ISO_MODE_400:
9519 sensitivity = 400;
9520 break;
9521 case CAM_ISO_MODE_800:
9522 sensitivity = 800;
9523 break;
9524 case CAM_ISO_MODE_1600:
9525 sensitivity = 1600;
9526 break;
9527 default:
9528 sensitivity = -1;
9529 break;
9530 }
9531 return sensitivity;
9532}
9533
9534/*===========================================================================
9535 * FUNCTION : getCamInfo
9536 *
9537 * DESCRIPTION: query camera capabilities
9538 *
9539 * PARAMETERS :
9540 * @cameraId : camera Id
9541 * @info : camera info struct to be filled in with camera capabilities
9542 *
9543 * RETURN : int type of status
9544 * NO_ERROR -- success
9545 * none-zero failure code
9546 *==========================================================================*/
9547int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
9548 struct camera_info *info)
9549{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009550 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009551 int rc = 0;
9552
9553 pthread_mutex_lock(&gCamLock);
9554 if (NULL == gCamCapability[cameraId]) {
9555 rc = initCapabilities(cameraId);
9556 if (rc < 0) {
9557 pthread_mutex_unlock(&gCamLock);
9558 return rc;
9559 }
9560 }
9561
9562 if (NULL == gStaticMetadata[cameraId]) {
9563 rc = initStaticMetadata(cameraId);
9564 if (rc < 0) {
9565 pthread_mutex_unlock(&gCamLock);
9566 return rc;
9567 }
9568 }
9569
9570 switch(gCamCapability[cameraId]->position) {
9571 case CAM_POSITION_BACK:
9572 case CAM_POSITION_BACK_AUX:
9573 info->facing = CAMERA_FACING_BACK;
9574 break;
9575
9576 case CAM_POSITION_FRONT:
9577 case CAM_POSITION_FRONT_AUX:
9578 info->facing = CAMERA_FACING_FRONT;
9579 break;
9580
9581 default:
9582 LOGE("Unknown position type %d for camera id:%d",
9583 gCamCapability[cameraId]->position, cameraId);
9584 rc = -1;
9585 break;
9586 }
9587
9588
9589 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009590#ifndef USE_HAL_3_3
9591 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
9592#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009593 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009594#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009595 info->static_camera_characteristics = gStaticMetadata[cameraId];
9596
9597 //For now assume both cameras can operate independently.
9598 info->conflicting_devices = NULL;
9599 info->conflicting_devices_length = 0;
9600
9601 //resource cost is 100 * MIN(1.0, m/M),
9602 //where m is throughput requirement with maximum stream configuration
9603 //and M is CPP maximum throughput.
9604 float max_fps = 0.0;
9605 for (uint32_t i = 0;
9606 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
9607 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
9608 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
9609 }
9610 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
9611 gCamCapability[cameraId]->active_array_size.width *
9612 gCamCapability[cameraId]->active_array_size.height * max_fps /
9613 gCamCapability[cameraId]->max_pixel_bandwidth;
9614 info->resource_cost = 100 * MIN(1.0, ratio);
9615 LOGI("camera %d resource cost is %d", cameraId,
9616 info->resource_cost);
9617
9618 pthread_mutex_unlock(&gCamLock);
9619 return rc;
9620}
9621
9622/*===========================================================================
9623 * FUNCTION : translateCapabilityToMetadata
9624 *
9625 * DESCRIPTION: translate the capability into camera_metadata_t
9626 *
9627 * PARAMETERS : type of the request
9628 *
9629 *
9630 * RETURN : success: camera_metadata_t*
9631 * failure: NULL
9632 *
9633 *==========================================================================*/
9634camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9635{
9636 if (mDefaultMetadata[type] != NULL) {
9637 return mDefaultMetadata[type];
9638 }
9639 //first time we are handling this request
9640 //fill up the metadata structure using the wrapper class
9641 CameraMetadata settings;
9642 //translate from cam_capability_t to camera_metadata_tag_t
9643 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9644 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9645 int32_t defaultRequestID = 0;
9646 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9647
9648 /* OIS disable */
9649 char ois_prop[PROPERTY_VALUE_MAX];
9650 memset(ois_prop, 0, sizeof(ois_prop));
9651 property_get("persist.camera.ois.disable", ois_prop, "0");
9652 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9653
9654 /* Force video to use OIS */
9655 char videoOisProp[PROPERTY_VALUE_MAX];
9656 memset(videoOisProp, 0, sizeof(videoOisProp));
9657 property_get("persist.camera.ois.video", videoOisProp, "1");
9658 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009659
9660 // Hybrid AE enable/disable
9661 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9662 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9663 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9664 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9665
Thierry Strudel3d639192016-09-09 11:52:26 -07009666 uint8_t controlIntent = 0;
9667 uint8_t focusMode;
9668 uint8_t vsMode;
9669 uint8_t optStabMode;
9670 uint8_t cacMode;
9671 uint8_t edge_mode;
9672 uint8_t noise_red_mode;
9673 uint8_t tonemap_mode;
9674 bool highQualityModeEntryAvailable = FALSE;
9675 bool fastModeEntryAvailable = FALSE;
9676 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9677 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009678
9679 char videoHdrProp[PROPERTY_VALUE_MAX];
9680 memset(videoHdrProp, 0, sizeof(videoHdrProp));
9681 property_get("persist.camera.hdr.video", videoHdrProp, "0");
9682 uint8_t hdr_mode = (uint8_t)atoi(videoHdrProp);
9683
Thierry Strudel3d639192016-09-09 11:52:26 -07009684 switch (type) {
9685 case CAMERA3_TEMPLATE_PREVIEW:
9686 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9687 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9688 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9689 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9690 edge_mode = ANDROID_EDGE_MODE_FAST;
9691 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9692 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9693 break;
9694 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9695 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9696 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9697 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9698 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9699 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9700 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9701 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9702 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9703 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9704 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9705 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9706 highQualityModeEntryAvailable = TRUE;
9707 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9708 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9709 fastModeEntryAvailable = TRUE;
9710 }
9711 }
9712 if (highQualityModeEntryAvailable) {
9713 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9714 } else if (fastModeEntryAvailable) {
9715 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9716 }
9717 break;
9718 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9719 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9720 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9721 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009722 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9723 edge_mode = ANDROID_EDGE_MODE_FAST;
9724 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9725 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9726 if (forceVideoOis)
9727 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009728 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009729 break;
9730 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9731 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9732 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9733 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009734 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9735 edge_mode = ANDROID_EDGE_MODE_FAST;
9736 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9737 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9738 if (forceVideoOis)
9739 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
Mansoor Aftabea39eba2017-01-26 14:58:25 -08009740 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009741 break;
9742 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9743 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9744 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9745 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9746 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9747 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9748 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9749 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9750 break;
9751 case CAMERA3_TEMPLATE_MANUAL:
9752 edge_mode = ANDROID_EDGE_MODE_FAST;
9753 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9754 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9755 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9756 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9757 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9758 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9759 break;
9760 default:
9761 edge_mode = ANDROID_EDGE_MODE_FAST;
9762 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9763 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9764 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9765 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9766 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9767 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9768 break;
9769 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009770 // Set CAC to OFF if underlying device doesn't support
9771 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9772 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9773 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009774 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9775 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9776 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9777 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9778 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9779 }
9780 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9781
9782 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9783 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9784 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9785 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9786 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9787 || ois_disable)
9788 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9789 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9790
9791 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9792 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9793
9794 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9795 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9796
9797 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9798 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9799
9800 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9801 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9802
9803 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9804 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9805
9806 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9807 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9808
9809 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9810 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9811
9812 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9813 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9814
9815 /*flash*/
9816 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9817 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9818
9819 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9820 settings.update(ANDROID_FLASH_FIRING_POWER,
9821 &flashFiringLevel, 1);
9822
9823 /* lens */
9824 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9825 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9826
9827 if (gCamCapability[mCameraId]->filter_densities_count) {
9828 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9829 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9830 gCamCapability[mCameraId]->filter_densities_count);
9831 }
9832
9833 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9834 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9835
9836 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9837 float default_focus_distance = 0;
9838 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9839 }
9840
9841 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9842 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9843
9844 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9845 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9846
9847 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9848 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9849
9850 /* face detection (default to OFF) */
9851 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9852 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9853
9854 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9855 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9856
9857 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9858 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9859
9860 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9861 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9862
9863 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9864 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9865
9866 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9867 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9868
9869 /* Exposure time(Update the Min Exposure Time)*/
9870 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9871 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9872
9873 /* frame duration */
9874 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9875 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9876
9877 /* sensitivity */
9878 static const int32_t default_sensitivity = 100;
9879 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009880#ifndef USE_HAL_3_3
9881 static const int32_t default_isp_sensitivity =
9882 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9883 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9884#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009885
9886 /*edge mode*/
9887 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9888
9889 /*noise reduction mode*/
9890 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9891
9892 /*color correction mode*/
9893 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9894 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9895
9896 /*transform matrix mode*/
9897 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9898
9899 int32_t scaler_crop_region[4];
9900 scaler_crop_region[0] = 0;
9901 scaler_crop_region[1] = 0;
9902 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9903 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9904 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9905
9906 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9907 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9908
9909 /*focus distance*/
9910 float focus_distance = 0.0;
9911 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9912
9913 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009914 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009915 float max_range = 0.0;
9916 float max_fixed_fps = 0.0;
9917 int32_t fps_range[2] = {0, 0};
9918 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9919 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009920 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9921 TEMPLATE_MAX_PREVIEW_FPS) {
9922 continue;
9923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009924 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9925 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9926 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9927 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9928 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9929 if (range > max_range) {
9930 fps_range[0] =
9931 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9932 fps_range[1] =
9933 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9934 max_range = range;
9935 }
9936 } else {
9937 if (range < 0.01 && max_fixed_fps <
9938 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9939 fps_range[0] =
9940 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9941 fps_range[1] =
9942 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9943 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9944 }
9945 }
9946 }
9947 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9948
9949 /*precapture trigger*/
9950 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9951 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9952
9953 /*af trigger*/
9954 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9955 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9956
9957 /* ae & af regions */
9958 int32_t active_region[] = {
9959 gCamCapability[mCameraId]->active_array_size.left,
9960 gCamCapability[mCameraId]->active_array_size.top,
9961 gCamCapability[mCameraId]->active_array_size.left +
9962 gCamCapability[mCameraId]->active_array_size.width,
9963 gCamCapability[mCameraId]->active_array_size.top +
9964 gCamCapability[mCameraId]->active_array_size.height,
9965 0};
9966 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9967 sizeof(active_region) / sizeof(active_region[0]));
9968 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9969 sizeof(active_region) / sizeof(active_region[0]));
9970
9971 /* black level lock */
9972 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9973 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9974
9975 /* lens shading map mode */
9976 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9977 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9978 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9979 }
9980 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9981
9982 //special defaults for manual template
9983 if (type == CAMERA3_TEMPLATE_MANUAL) {
9984 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9985 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9986
9987 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9988 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9989
9990 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9991 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9992
9993 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9994 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9995
9996 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9997 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9998
9999 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10000 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10001 }
10002
10003
10004 /* TNR
10005 * We'll use this location to determine which modes TNR will be set.
10006 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
10007 * This is not to be confused with linking on a per stream basis that decision
10008 * is still on per-session basis and will be handled as part of config stream
10009 */
10010 uint8_t tnr_enable = 0;
10011
10012 if (m_bTnrPreview || m_bTnrVideo) {
10013
10014 switch (type) {
10015 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10016 tnr_enable = 1;
10017 break;
10018
10019 default:
10020 tnr_enable = 0;
10021 break;
10022 }
10023
10024 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10025 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10026 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10027
10028 LOGD("TNR:%d with process plate %d for template:%d",
10029 tnr_enable, tnr_process_type, type);
10030 }
10031
10032 //Update Link tags to default
10033 int32_t sync_type = CAM_TYPE_STANDALONE;
10034 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10035
10036 int32_t is_main = 0; //this doesn't matter as app should overwrite
10037 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10038
10039 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10040
10041 /* CDS default */
10042 char prop[PROPERTY_VALUE_MAX];
10043 memset(prop, 0, sizeof(prop));
10044 property_get("persist.camera.CDS", prop, "Auto");
10045 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10046 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10047 if (CAM_CDS_MODE_MAX == cds_mode) {
10048 cds_mode = CAM_CDS_MODE_AUTO;
10049 }
10050
10051 /* Disabling CDS in templates which have TNR enabled*/
10052 if (tnr_enable)
10053 cds_mode = CAM_CDS_MODE_OFF;
10054
10055 int32_t mode = cds_mode;
10056 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010057
Thierry Strudel04e026f2016-10-10 11:27:36 -070010058 /* IR Mode Default Off */
10059 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
10060 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
10061
Thierry Strudel269c81a2016-10-12 12:13:59 -070010062 /* Manual Convergence AEC Speed is disabled by default*/
10063 float default_aec_speed = 0;
10064 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10065
10066 /* Manual Convergence AWB Speed is disabled by default*/
10067 float default_awb_speed = 0;
10068 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10069
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010070 // Set instant AEC to normal convergence by default
10071 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10072 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10073
Shuzhen Wang19463d72016-03-08 11:09:52 -080010074 /* hybrid ae */
10075 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10076
Thierry Strudel3d639192016-09-09 11:52:26 -070010077 mDefaultMetadata[type] = settings.release();
10078
10079 return mDefaultMetadata[type];
10080}
10081
10082/*===========================================================================
10083 * FUNCTION : setFrameParameters
10084 *
10085 * DESCRIPTION: set parameters per frame as requested in the metadata from
10086 * framework
10087 *
10088 * PARAMETERS :
10089 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010090 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010091 * @blob_request: Whether this request is a blob request or not
10092 *
10093 * RETURN : success: NO_ERROR
10094 * failure:
10095 *==========================================================================*/
10096int QCamera3HardwareInterface::setFrameParameters(
10097 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010098 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010099 int blob_request,
10100 uint32_t snapshotStreamId)
10101{
10102 /*translate from camera_metadata_t type to parm_type_t*/
10103 int rc = 0;
10104 int32_t hal_version = CAM_HAL_V3;
10105
10106 clear_metadata_buffer(mParameters);
10107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10108 LOGE("Failed to set hal version in the parameters");
10109 return BAD_VALUE;
10110 }
10111
10112 /*we need to update the frame number in the parameters*/
10113 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10114 request->frame_number)) {
10115 LOGE("Failed to set the frame number in the parameters");
10116 return BAD_VALUE;
10117 }
10118
10119 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010121 LOGE("Failed to set stream type mask in the parameters");
10122 return BAD_VALUE;
10123 }
10124
10125 if (mUpdateDebugLevel) {
10126 uint32_t dummyDebugLevel = 0;
10127 /* The value of dummyDebugLevel is irrelavent. On
10128 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
10129 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10130 dummyDebugLevel)) {
10131 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10132 return BAD_VALUE;
10133 }
10134 mUpdateDebugLevel = false;
10135 }
10136
10137 if(request->settings != NULL){
10138 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10139 if (blob_request)
10140 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10141 }
10142
10143 return rc;
10144}
10145
10146/*===========================================================================
10147 * FUNCTION : setReprocParameters
10148 *
10149 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10150 * return it.
10151 *
10152 * PARAMETERS :
10153 * @request : request that needs to be serviced
10154 *
10155 * RETURN : success: NO_ERROR
10156 * failure:
10157 *==========================================================================*/
10158int32_t QCamera3HardwareInterface::setReprocParameters(
10159 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10160 uint32_t snapshotStreamId)
10161{
10162 /*translate from camera_metadata_t type to parm_type_t*/
10163 int rc = 0;
10164
10165 if (NULL == request->settings){
10166 LOGE("Reprocess settings cannot be NULL");
10167 return BAD_VALUE;
10168 }
10169
10170 if (NULL == reprocParam) {
10171 LOGE("Invalid reprocessing metadata buffer");
10172 return BAD_VALUE;
10173 }
10174 clear_metadata_buffer(reprocParam);
10175
10176 /*we need to update the frame number in the parameters*/
10177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10178 request->frame_number)) {
10179 LOGE("Failed to set the frame number in the parameters");
10180 return BAD_VALUE;
10181 }
10182
10183 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10184 if (rc < 0) {
10185 LOGE("Failed to translate reproc request");
10186 return rc;
10187 }
10188
10189 CameraMetadata frame_settings;
10190 frame_settings = request->settings;
10191 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10192 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10193 int32_t *crop_count =
10194 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10195 int32_t *crop_data =
10196 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10197 int32_t *roi_map =
10198 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10199 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10200 cam_crop_data_t crop_meta;
10201 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10202 crop_meta.num_of_streams = 1;
10203 crop_meta.crop_info[0].crop.left = crop_data[0];
10204 crop_meta.crop_info[0].crop.top = crop_data[1];
10205 crop_meta.crop_info[0].crop.width = crop_data[2];
10206 crop_meta.crop_info[0].crop.height = crop_data[3];
10207
10208 crop_meta.crop_info[0].roi_map.left =
10209 roi_map[0];
10210 crop_meta.crop_info[0].roi_map.top =
10211 roi_map[1];
10212 crop_meta.crop_info[0].roi_map.width =
10213 roi_map[2];
10214 crop_meta.crop_info[0].roi_map.height =
10215 roi_map[3];
10216
10217 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10218 rc = BAD_VALUE;
10219 }
10220 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10221 request->input_buffer->stream,
10222 crop_meta.crop_info[0].crop.left,
10223 crop_meta.crop_info[0].crop.top,
10224 crop_meta.crop_info[0].crop.width,
10225 crop_meta.crop_info[0].crop.height);
10226 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10227 request->input_buffer->stream,
10228 crop_meta.crop_info[0].roi_map.left,
10229 crop_meta.crop_info[0].roi_map.top,
10230 crop_meta.crop_info[0].roi_map.width,
10231 crop_meta.crop_info[0].roi_map.height);
10232 } else {
10233 LOGE("Invalid reprocess crop count %d!", *crop_count);
10234 }
10235 } else {
10236 LOGE("No crop data from matching output stream");
10237 }
10238
10239 /* These settings are not needed for regular requests so handle them specially for
10240 reprocess requests; information needed for EXIF tags */
10241 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10242 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10243 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10244 if (NAME_NOT_FOUND != val) {
10245 uint32_t flashMode = (uint32_t)val;
10246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10247 rc = BAD_VALUE;
10248 }
10249 } else {
10250 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10251 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10252 }
10253 } else {
10254 LOGH("No flash mode in reprocess settings");
10255 }
10256
10257 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10258 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10260 rc = BAD_VALUE;
10261 }
10262 } else {
10263 LOGH("No flash state in reprocess settings");
10264 }
10265
10266 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10267 uint8_t *reprocessFlags =
10268 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10269 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10270 *reprocessFlags)) {
10271 rc = BAD_VALUE;
10272 }
10273 }
10274
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010275 // Add metadata which reprocess needs
10276 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10277 cam_reprocess_info_t *repro_info =
10278 (cam_reprocess_info_t *)frame_settings.find
10279 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010280 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010281 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010282 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010283 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010284 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010285 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010286 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010287 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010288 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010289 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010290 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010291 repro_info->pipeline_flip);
10292 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10293 repro_info->af_roi);
10294 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10295 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010296 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10297 CAM_INTF_PARM_ROTATION metadata then has been added in
10298 translateToHalMetadata. HAL need to keep this new rotation
10299 metadata. Otherwise, the old rotation info saved in the vendor tag
10300 would be used */
10301 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10302 CAM_INTF_PARM_ROTATION, reprocParam) {
10303 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10304 } else {
10305 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010306 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010307 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010308 }
10309
10310 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10311 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10312 roi.width and roi.height would be the final JPEG size.
10313 For now, HAL only checks this for reprocess request */
10314 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10315 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10316 uint8_t *enable =
10317 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10318 if (*enable == TRUE) {
10319 int32_t *crop_data =
10320 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10321 cam_stream_crop_info_t crop_meta;
10322 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10323 crop_meta.stream_id = 0;
10324 crop_meta.crop.left = crop_data[0];
10325 crop_meta.crop.top = crop_data[1];
10326 crop_meta.crop.width = crop_data[2];
10327 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010328 // The JPEG crop roi should match cpp output size
10329 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10330 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10331 crop_meta.roi_map.left = 0;
10332 crop_meta.roi_map.top = 0;
10333 crop_meta.roi_map.width = cpp_crop->crop.width;
10334 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010335 }
10336 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10337 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010338 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010339 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010340 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10341 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010342 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010343 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10344
10345 // Add JPEG scale information
10346 cam_dimension_t scale_dim;
10347 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10348 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10349 int32_t *roi =
10350 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10351 scale_dim.width = roi[2];
10352 scale_dim.height = roi[3];
10353 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10354 scale_dim);
10355 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10356 scale_dim.width, scale_dim.height, mCameraId);
10357 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010358 }
10359 }
10360
10361 return rc;
10362}
10363
10364/*===========================================================================
10365 * FUNCTION : saveRequestSettings
10366 *
10367 * DESCRIPTION: Add any settings that might have changed to the request settings
10368 * and save the settings to be applied on the frame
10369 *
10370 * PARAMETERS :
10371 * @jpegMetadata : the extracted and/or modified jpeg metadata
10372 * @request : request with initial settings
10373 *
10374 * RETURN :
10375 * camera_metadata_t* : pointer to the saved request settings
10376 *==========================================================================*/
10377camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10378 const CameraMetadata &jpegMetadata,
10379 camera3_capture_request_t *request)
10380{
10381 camera_metadata_t *resultMetadata;
10382 CameraMetadata camMetadata;
10383 camMetadata = request->settings;
10384
10385 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10386 int32_t thumbnail_size[2];
10387 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10388 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10389 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10390 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10391 }
10392
10393 if (request->input_buffer != NULL) {
10394 uint8_t reprocessFlags = 1;
10395 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10396 (uint8_t*)&reprocessFlags,
10397 sizeof(reprocessFlags));
10398 }
10399
10400 resultMetadata = camMetadata.release();
10401 return resultMetadata;
10402}
10403
10404/*===========================================================================
10405 * FUNCTION : setHalFpsRange
10406 *
10407 * DESCRIPTION: set FPS range parameter
10408 *
10409 *
10410 * PARAMETERS :
10411 * @settings : Metadata from framework
10412 * @hal_metadata: Metadata buffer
10413 *
10414 *
10415 * RETURN : success: NO_ERROR
10416 * failure:
10417 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    // Translate ANDROID_CONTROL_AE_TARGET_FPS_RANGE into the HAL FPS-range
    // parameter, and derive the HFR mode / batch size for constrained
    // high-speed sessions.
    // NOTE: the caller must guarantee ANDROID_CONTROL_AE_TARGET_FPS_RANGE
    // exists in `settings` (translateFwkMetadataToHalMetadata checks this
    // before calling) -- find() here is unchecked.
    // Side effects: updates session state mBatchSize, mHFRVideoFps and
    // mNeedSensorRestart.
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    fps_range.min_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
            settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record)           |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *      Video stream is present in configure_streams              |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *        YES        |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In HFR mode the sensor runs at the max fps; pin min to max so the
        // whole pipeline runs at the high-speed rate.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto a discrete HFR mode (e.g. 120, 240).
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames to keep preview at PREVIEW_FPS_FOR_HFR,
                // capped at the maximum the backend supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
10511
10512/*===========================================================================
10513 * FUNCTION : translateToHalMetadata
10514 *
10515 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10516 *
10517 *
10518 * PARAMETERS :
10519 * @request : request sent from framework
10520 *
10521 *
10522 * RETURN : success: NO_ERROR
10523 * failure:
10524 *==========================================================================*/
10525int QCamera3HardwareInterface::translateToHalMetadata
10526 (const camera3_capture_request_t *request,
10527 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080010528 uint32_t snapshotStreamId) {
10529 if (request == nullptr || hal_metadata == nullptr) {
10530 return BAD_VALUE;
10531 }
10532
10533 int64_t minFrameDuration = getMinFrameDuration(request);
10534
10535 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
10536 minFrameDuration);
10537}
10538
10539int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
10540 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
10541 uint32_t snapshotStreamId, int64_t minFrameDuration) {
10542
Thierry Strudel3d639192016-09-09 11:52:26 -070010543 int rc = 0;
10544 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080010545 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070010546
10547 /* Do not change the order of the following list unless you know what you are
10548 * doing.
10549 * The order is laid out in such a way that parameters in the front of the table
10550 * may be used to override the parameters later in the table. Examples are:
10551 * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10553 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10554 * 4. Any mode should precede it's corresponding settings
10555 */
10556 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10557 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10559 rc = BAD_VALUE;
10560 }
10561 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10562 if (rc != NO_ERROR) {
10563 LOGE("extractSceneMode failed");
10564 }
10565 }
10566
10567 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10568 uint8_t fwk_aeMode =
10569 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10570 uint8_t aeMode;
10571 int32_t redeye;
10572
10573 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10574 aeMode = CAM_AE_MODE_OFF;
10575 } else {
10576 aeMode = CAM_AE_MODE_ON;
10577 }
10578 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10579 redeye = 1;
10580 } else {
10581 redeye = 0;
10582 }
10583
10584 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10585 fwk_aeMode);
10586 if (NAME_NOT_FOUND != val) {
10587 int32_t flashMode = (int32_t)val;
10588 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10589 }
10590
10591 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10593 rc = BAD_VALUE;
10594 }
10595 }
10596
10597 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10598 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10599 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10600 fwk_whiteLevel);
10601 if (NAME_NOT_FOUND != val) {
10602 uint8_t whiteLevel = (uint8_t)val;
10603 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10604 rc = BAD_VALUE;
10605 }
10606 }
10607 }
10608
10609 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10610 uint8_t fwk_cacMode =
10611 frame_settings.find(
10612 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10613 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10614 fwk_cacMode);
10615 if (NAME_NOT_FOUND != val) {
10616 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10617 bool entryAvailable = FALSE;
10618 // Check whether Frameworks set CAC mode is supported in device or not
10619 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10620 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10621 entryAvailable = TRUE;
10622 break;
10623 }
10624 }
10625 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10626 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10627 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10628 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10629 if (entryAvailable == FALSE) {
10630 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10631 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10632 } else {
10633 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10634 // High is not supported and so set the FAST as spec say's underlying
10635 // device implementation can be the same for both modes.
10636 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10637 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10638 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10639 // in order to avoid the fps drop due to high quality
10640 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10641 } else {
10642 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10643 }
10644 }
10645 }
10646 LOGD("Final cacMode is %d", cacMode);
10647 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10648 rc = BAD_VALUE;
10649 }
10650 } else {
10651 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10652 }
10653 }
10654
10655 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10656 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10657 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10658 fwk_focusMode);
10659 if (NAME_NOT_FOUND != val) {
10660 uint8_t focusMode = (uint8_t)val;
10661 LOGD("set focus mode %d", focusMode);
10662 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10663 rc = BAD_VALUE;
10664 }
10665 }
10666 }
10667
10668 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10669 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10671 focalDistance)) {
10672 rc = BAD_VALUE;
10673 }
10674 }
10675
10676 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10677 uint8_t fwk_antibandingMode =
10678 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10679 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10680 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10681 if (NAME_NOT_FOUND != val) {
10682 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010683 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10684 if (m60HzZone) {
10685 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10686 } else {
10687 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10688 }
10689 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010690 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10691 hal_antibandingMode)) {
10692 rc = BAD_VALUE;
10693 }
10694 }
10695 }
10696
10697 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10698 int32_t expCompensation = frame_settings.find(
10699 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10700 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10701 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10702 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10703 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010704 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10706 expCompensation)) {
10707 rc = BAD_VALUE;
10708 }
10709 }
10710
10711 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10712 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10713 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10714 rc = BAD_VALUE;
10715 }
10716 }
10717 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10718 rc = setHalFpsRange(frame_settings, hal_metadata);
10719 if (rc != NO_ERROR) {
10720 LOGE("setHalFpsRange failed");
10721 }
10722 }
10723
10724 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10725 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10727 rc = BAD_VALUE;
10728 }
10729 }
10730
10731 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10732 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10733 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10734 fwk_effectMode);
10735 if (NAME_NOT_FOUND != val) {
10736 uint8_t effectMode = (uint8_t)val;
10737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10738 rc = BAD_VALUE;
10739 }
10740 }
10741 }
10742
10743 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10744 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10745 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10746 colorCorrectMode)) {
10747 rc = BAD_VALUE;
10748 }
10749 }
10750
10751 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10752 cam_color_correct_gains_t colorCorrectGains;
10753 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10754 colorCorrectGains.gains[i] =
10755 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10756 }
10757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10758 colorCorrectGains)) {
10759 rc = BAD_VALUE;
10760 }
10761 }
10762
10763 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10764 cam_color_correct_matrix_t colorCorrectTransform;
10765 cam_rational_type_t transform_elem;
10766 size_t num = 0;
10767 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10768 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10769 transform_elem.numerator =
10770 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10771 transform_elem.denominator =
10772 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10773 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10774 num++;
10775 }
10776 }
10777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10778 colorCorrectTransform)) {
10779 rc = BAD_VALUE;
10780 }
10781 }
10782
10783 cam_trigger_t aecTrigger;
10784 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10785 aecTrigger.trigger_id = -1;
10786 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10787 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10788 aecTrigger.trigger =
10789 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10790 aecTrigger.trigger_id =
10791 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10792 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10793 aecTrigger)) {
10794 rc = BAD_VALUE;
10795 }
10796 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10797 aecTrigger.trigger, aecTrigger.trigger_id);
10798 }
10799
10800 /*af_trigger must come with a trigger id*/
10801 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10802 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10803 cam_trigger_t af_trigger;
10804 af_trigger.trigger =
10805 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10806 af_trigger.trigger_id =
10807 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10809 rc = BAD_VALUE;
10810 }
10811 LOGD("AfTrigger: %d AfTriggerID: %d",
10812 af_trigger.trigger, af_trigger.trigger_id);
10813 }
10814
10815 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10816 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10818 rc = BAD_VALUE;
10819 }
10820 }
10821 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10822 cam_edge_application_t edge_application;
10823 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10824 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10825 edge_application.sharpness = 0;
10826 } else {
10827 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10828 }
10829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10830 rc = BAD_VALUE;
10831 }
10832 }
10833
10834 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10835 int32_t respectFlashMode = 1;
10836 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10837 uint8_t fwk_aeMode =
10838 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10839 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10840 respectFlashMode = 0;
10841 LOGH("AE Mode controls flash, ignore android.flash.mode");
10842 }
10843 }
10844 if (respectFlashMode) {
10845 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10846 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10847 LOGH("flash mode after mapping %d", val);
10848 // To check: CAM_INTF_META_FLASH_MODE usage
10849 if (NAME_NOT_FOUND != val) {
10850 uint8_t flashMode = (uint8_t)val;
10851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10852 rc = BAD_VALUE;
10853 }
10854 }
10855 }
10856 }
10857
10858 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10859 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10861 rc = BAD_VALUE;
10862 }
10863 }
10864
10865 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10866 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10868 flashFiringTime)) {
10869 rc = BAD_VALUE;
10870 }
10871 }
10872
10873 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10874 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10876 hotPixelMode)) {
10877 rc = BAD_VALUE;
10878 }
10879 }
10880
10881 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10882 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10883 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10884 lensAperture)) {
10885 rc = BAD_VALUE;
10886 }
10887 }
10888
10889 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10890 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10891 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10892 filterDensity)) {
10893 rc = BAD_VALUE;
10894 }
10895 }
10896
10897 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10898 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10899 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10900 focalLength)) {
10901 rc = BAD_VALUE;
10902 }
10903 }
10904
10905 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10906 uint8_t optStabMode =
10907 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10908 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10909 optStabMode)) {
10910 rc = BAD_VALUE;
10911 }
10912 }
10913
10914 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10915 uint8_t videoStabMode =
10916 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10917 LOGD("videoStabMode from APP = %d", videoStabMode);
10918 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10919 videoStabMode)) {
10920 rc = BAD_VALUE;
10921 }
10922 }
10923
10924
10925 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10926 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10927 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10928 noiseRedMode)) {
10929 rc = BAD_VALUE;
10930 }
10931 }
10932
10933 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10934 float reprocessEffectiveExposureFactor =
10935 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10937 reprocessEffectiveExposureFactor)) {
10938 rc = BAD_VALUE;
10939 }
10940 }
10941
10942 cam_crop_region_t scalerCropRegion;
10943 bool scalerCropSet = false;
10944 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10945 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10946 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10947 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10948 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10949
10950 // Map coordinate system from active array to sensor output.
10951 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10952 scalerCropRegion.width, scalerCropRegion.height);
10953
10954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10955 scalerCropRegion)) {
10956 rc = BAD_VALUE;
10957 }
10958 scalerCropSet = true;
10959 }
10960
10961 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10962 int64_t sensorExpTime =
10963 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10964 LOGD("setting sensorExpTime %lld", sensorExpTime);
10965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10966 sensorExpTime)) {
10967 rc = BAD_VALUE;
10968 }
10969 }
10970
10971 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10972 int64_t sensorFrameDuration =
10973 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070010974 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10975 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10976 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10977 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10979 sensorFrameDuration)) {
10980 rc = BAD_VALUE;
10981 }
10982 }
10983
10984 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10985 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10986 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10987 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10988 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10989 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10990 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10992 sensorSensitivity)) {
10993 rc = BAD_VALUE;
10994 }
10995 }
10996
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010997#ifndef USE_HAL_3_3
10998 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10999 int32_t ispSensitivity =
11000 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11001 if (ispSensitivity <
11002 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11003 ispSensitivity =
11004 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11005 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11006 }
11007 if (ispSensitivity >
11008 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11009 ispSensitivity =
11010 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11011 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11012 }
11013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11014 ispSensitivity)) {
11015 rc = BAD_VALUE;
11016 }
11017 }
11018#endif
11019
Thierry Strudel3d639192016-09-09 11:52:26 -070011020 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11021 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11022 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11023 rc = BAD_VALUE;
11024 }
11025 }
11026
11027 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11028 uint8_t fwk_facedetectMode =
11029 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11030
11031 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11032 fwk_facedetectMode);
11033
11034 if (NAME_NOT_FOUND != val) {
11035 uint8_t facedetectMode = (uint8_t)val;
11036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11037 facedetectMode)) {
11038 rc = BAD_VALUE;
11039 }
11040 }
11041 }
11042
11043 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
11044 uint8_t histogramMode =
11045 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
11046 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11047 histogramMode)) {
11048 rc = BAD_VALUE;
11049 }
11050 }
11051
11052 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11053 uint8_t sharpnessMapMode =
11054 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11055 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11056 sharpnessMapMode)) {
11057 rc = BAD_VALUE;
11058 }
11059 }
11060
11061 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11062 uint8_t tonemapMode =
11063 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11065 rc = BAD_VALUE;
11066 }
11067 }
11068 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11069 /*All tonemap channels will have the same number of points*/
11070 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11071 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11072 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11073 cam_rgb_tonemap_curves tonemapCurves;
11074 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11075 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11076 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11077 tonemapCurves.tonemap_points_cnt,
11078 CAM_MAX_TONEMAP_CURVE_SIZE);
11079 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11080 }
11081
11082 /* ch0 = G*/
11083 size_t point = 0;
11084 cam_tonemap_curve_t tonemapCurveGreen;
11085 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11086 for (size_t j = 0; j < 2; j++) {
11087 tonemapCurveGreen.tonemap_points[i][j] =
11088 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11089 point++;
11090 }
11091 }
11092 tonemapCurves.curves[0] = tonemapCurveGreen;
11093
11094 /* ch 1 = B */
11095 point = 0;
11096 cam_tonemap_curve_t tonemapCurveBlue;
11097 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11098 for (size_t j = 0; j < 2; j++) {
11099 tonemapCurveBlue.tonemap_points[i][j] =
11100 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11101 point++;
11102 }
11103 }
11104 tonemapCurves.curves[1] = tonemapCurveBlue;
11105
11106 /* ch 2 = R */
11107 point = 0;
11108 cam_tonemap_curve_t tonemapCurveRed;
11109 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11110 for (size_t j = 0; j < 2; j++) {
11111 tonemapCurveRed.tonemap_points[i][j] =
11112 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11113 point++;
11114 }
11115 }
11116 tonemapCurves.curves[2] = tonemapCurveRed;
11117
11118 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11119 tonemapCurves)) {
11120 rc = BAD_VALUE;
11121 }
11122 }
11123
11124 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11125 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11127 captureIntent)) {
11128 rc = BAD_VALUE;
11129 }
11130 }
11131
11132 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11133 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11135 blackLevelLock)) {
11136 rc = BAD_VALUE;
11137 }
11138 }
11139
11140 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11141 uint8_t lensShadingMapMode =
11142 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11144 lensShadingMapMode)) {
11145 rc = BAD_VALUE;
11146 }
11147 }
11148
11149 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11150 cam_area_t roi;
11151 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011152 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011153
11154 // Map coordinate system from active array to sensor output.
11155 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11156 roi.rect.height);
11157
11158 if (scalerCropSet) {
11159 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11160 }
11161 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11162 rc = BAD_VALUE;
11163 }
11164 }
11165
11166 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11167 cam_area_t roi;
11168 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011169 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070011170
11171 // Map coordinate system from active array to sensor output.
11172 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11173 roi.rect.height);
11174
11175 if (scalerCropSet) {
11176 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11177 }
11178 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11179 rc = BAD_VALUE;
11180 }
11181 }
11182
11183 // CDS for non-HFR non-video mode
11184 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11185 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11186 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11187 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11188 LOGE("Invalid CDS mode %d!", *fwk_cds);
11189 } else {
11190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11191 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11192 rc = BAD_VALUE;
11193 }
11194 }
11195 }
11196
Thierry Strudel04e026f2016-10-10 11:27:36 -070011197 // Video HDR
11198 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
11199 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
11200 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11201 rc = setVideoHdrMode(mParameters, vhdr);
11202 if (rc != NO_ERROR) {
11203 LOGE("setVideoHDR is failed");
11204 }
11205 }
11206
11207 //IR
11208 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11209 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11210 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
11211 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11212 LOGE("Invalid IR mode %d!", fwk_ir);
11213 } else {
11214 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11215 CAM_INTF_META_IR_MODE, fwk_ir)) {
11216 rc = BAD_VALUE;
11217 }
11218 }
11219 }
11220
Thierry Strudel269c81a2016-10-12 12:13:59 -070011221 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11222 float aec_speed;
11223 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11224 LOGD("AEC Speed :%f", aec_speed);
11225 if ( aec_speed < 0 ) {
11226 LOGE("Invalid AEC mode %f!", aec_speed);
11227 } else {
11228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11229 aec_speed)) {
11230 rc = BAD_VALUE;
11231 }
11232 }
11233 }
11234
11235 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
11236 float awb_speed;
11237 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
11238 LOGD("AWB Speed :%f", awb_speed);
11239 if ( awb_speed < 0 ) {
11240 LOGE("Invalid AWB mode %f!", awb_speed);
11241 } else {
11242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
11243 awb_speed)) {
11244 rc = BAD_VALUE;
11245 }
11246 }
11247 }
11248
Thierry Strudel3d639192016-09-09 11:52:26 -070011249 // TNR
11250 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
11251 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
11252 uint8_t b_TnrRequested = 0;
11253 cam_denoise_param_t tnr;
11254 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
11255 tnr.process_plates =
11256 (cam_denoise_process_type_t)frame_settings.find(
11257 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
11258 b_TnrRequested = tnr.denoise_enable;
11259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
11260 rc = BAD_VALUE;
11261 }
11262 }
11263
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011264 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
11265 int32_t* exposure_metering_mode =
11266 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
11267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
11268 *exposure_metering_mode)) {
11269 rc = BAD_VALUE;
11270 }
11271 }
11272
Thierry Strudel3d639192016-09-09 11:52:26 -070011273 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
11274 int32_t fwk_testPatternMode =
11275 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
11276 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
11277 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
11278
11279 if (NAME_NOT_FOUND != testPatternMode) {
11280 cam_test_pattern_data_t testPatternData;
11281 memset(&testPatternData, 0, sizeof(testPatternData));
11282 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
11283 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
11284 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
11285 int32_t *fwk_testPatternData =
11286 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11287 testPatternData.r = fwk_testPatternData[0];
11288 testPatternData.b = fwk_testPatternData[3];
11289 switch (gCamCapability[mCameraId]->color_arrangement) {
11290 case CAM_FILTER_ARRANGEMENT_RGGB:
11291 case CAM_FILTER_ARRANGEMENT_GRBG:
11292 testPatternData.gr = fwk_testPatternData[1];
11293 testPatternData.gb = fwk_testPatternData[2];
11294 break;
11295 case CAM_FILTER_ARRANGEMENT_GBRG:
11296 case CAM_FILTER_ARRANGEMENT_BGGR:
11297 testPatternData.gr = fwk_testPatternData[2];
11298 testPatternData.gb = fwk_testPatternData[1];
11299 break;
11300 default:
11301 LOGE("color arrangement %d is not supported",
11302 gCamCapability[mCameraId]->color_arrangement);
11303 break;
11304 }
11305 }
11306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11307 testPatternData)) {
11308 rc = BAD_VALUE;
11309 }
11310 } else {
11311 LOGE("Invalid framework sensor test pattern mode %d",
11312 fwk_testPatternMode);
11313 }
11314 }
11315
11316 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11317 size_t count = 0;
11318 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11319 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11320 gps_coords.data.d, gps_coords.count, count);
11321 if (gps_coords.count != count) {
11322 rc = BAD_VALUE;
11323 }
11324 }
11325
11326 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11327 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11328 size_t count = 0;
11329 const char *gps_methods_src = (const char *)
11330 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11331 memset(gps_methods, '\0', sizeof(gps_methods));
11332 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11333 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11334 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11335 if (GPS_PROCESSING_METHOD_SIZE != count) {
11336 rc = BAD_VALUE;
11337 }
11338 }
11339
11340 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11341 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11342 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11343 gps_timestamp)) {
11344 rc = BAD_VALUE;
11345 }
11346 }
11347
11348 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11349 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11350 cam_rotation_info_t rotation_info;
11351 if (orientation == 0) {
11352 rotation_info.rotation = ROTATE_0;
11353 } else if (orientation == 90) {
11354 rotation_info.rotation = ROTATE_90;
11355 } else if (orientation == 180) {
11356 rotation_info.rotation = ROTATE_180;
11357 } else if (orientation == 270) {
11358 rotation_info.rotation = ROTATE_270;
11359 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011360 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011361 rotation_info.streamId = snapshotStreamId;
11362 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11364 rc = BAD_VALUE;
11365 }
11366 }
11367
11368 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11369 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11371 rc = BAD_VALUE;
11372 }
11373 }
11374
11375 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11376 uint32_t thumb_quality = (uint32_t)
11377 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11378 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11379 thumb_quality)) {
11380 rc = BAD_VALUE;
11381 }
11382 }
11383
11384 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11385 cam_dimension_t dim;
11386 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11387 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11389 rc = BAD_VALUE;
11390 }
11391 }
11392
11393 // Internal metadata
11394 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11395 size_t count = 0;
11396 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11397 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11398 privatedata.data.i32, privatedata.count, count);
11399 if (privatedata.count != count) {
11400 rc = BAD_VALUE;
11401 }
11402 }
11403
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011404 // ISO/Exposure Priority
11405 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11406 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11407 cam_priority_mode_t mode =
11408 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11409 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11410 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11411 use_iso_exp_pty.previewOnly = FALSE;
11412 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11413 use_iso_exp_pty.value = *ptr;
11414
11415 if(CAM_ISO_PRIORITY == mode) {
11416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11417 use_iso_exp_pty)) {
11418 rc = BAD_VALUE;
11419 }
11420 }
11421 else {
11422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11423 use_iso_exp_pty)) {
11424 rc = BAD_VALUE;
11425 }
11426 }
11427 }
11428 }
11429
11430 // Saturation
11431 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11432 int32_t* use_saturation =
11433 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11434 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11435 rc = BAD_VALUE;
11436 }
11437 }
11438
Thierry Strudel3d639192016-09-09 11:52:26 -070011439 // EV step
11440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11441 gCamCapability[mCameraId]->exp_compensation_step)) {
11442 rc = BAD_VALUE;
11443 }
11444
11445 // CDS info
11446 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11447 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11448 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11449
11450 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11451 CAM_INTF_META_CDS_DATA, *cdsData)) {
11452 rc = BAD_VALUE;
11453 }
11454 }
11455
Shuzhen Wang19463d72016-03-08 11:09:52 -080011456 // Hybrid AE
11457 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11458 uint8_t *hybrid_ae = (uint8_t *)
11459 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11460
11461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11462 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11463 rc = BAD_VALUE;
11464 }
11465 }
11466
Thierry Strudel3d639192016-09-09 11:52:26 -070011467 return rc;
11468}
11469
11470/*===========================================================================
11471 * FUNCTION : captureResultCb
11472 *
11473 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11474 *
 * PARAMETERS :
 *   @metadata : metadata (superbuf) information from mm-camera-interface
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *   @frame_number : frame number of the request the result belongs to
 *   @isInputBuffer: true if the buffer is an input (reprocess) buffer
 *   @userdata : opaque pointer to the QCamera3HardwareInterface instance
11479 *
11480 * RETURN : NONE
11481 *==========================================================================*/
11482void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11483 camera3_stream_buffer_t *buffer,
11484 uint32_t frame_number, bool isInputBuffer, void *userdata)
11485{
11486 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11487 if (hw == NULL) {
11488 LOGE("Invalid hw %p", hw);
11489 return;
11490 }
11491
11492 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11493 return;
11494}
11495
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011496/*===========================================================================
11497 * FUNCTION : setBufferErrorStatus
11498 *
11499 * DESCRIPTION: Callback handler for channels to report any buffer errors
11500 *
11501 * PARAMETERS :
11502 * @ch : Channel on which buffer error is reported from
11503 * @frame_number : frame number on which buffer error is reported on
11504 * @buffer_status : buffer error status
11505 * @userdata: userdata
11506 *
11507 * RETURN : NONE
11508 *==========================================================================*/
11509void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11510 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11511{
11512 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11513 if (hw == NULL) {
11514 LOGE("Invalid hw %p", hw);
11515 return;
11516 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011517
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011518 hw->setBufferErrorStatus(ch, frame_number, err);
11519 return;
11520}
11521
11522void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11523 uint32_t frameNumber, camera3_buffer_status_t err)
11524{
11525 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11526 pthread_mutex_lock(&mMutex);
11527
11528 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11529 if (req.frame_number != frameNumber)
11530 continue;
11531 for (auto& k : req.mPendingBufferList) {
11532 if(k.stream->priv == ch) {
11533 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11534 }
11535 }
11536 }
11537
11538 pthread_mutex_unlock(&mMutex);
11539 return;
11540}
Thierry Strudel3d639192016-09-09 11:52:26 -070011541/*===========================================================================
11542 * FUNCTION : initialize
11543 *
11544 * DESCRIPTION: Pass framework callback pointers to HAL
11545 *
11546 * PARAMETERS :
11547 *
11548 *
11549 * RETURN : Success : 0
11550 * Failure: -ENODEV
11551 *==========================================================================*/
11552
11553int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11554 const camera3_callback_ops_t *callback_ops)
11555{
11556 LOGD("E");
11557 QCamera3HardwareInterface *hw =
11558 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11559 if (!hw) {
11560 LOGE("NULL camera device");
11561 return -ENODEV;
11562 }
11563
11564 int rc = hw->initialize(callback_ops);
11565 LOGD("X");
11566 return rc;
11567}
11568
11569/*===========================================================================
11570 * FUNCTION : configure_streams
11571 *
11572 * DESCRIPTION:
11573 *
11574 * PARAMETERS :
11575 *
11576 *
11577 * RETURN : Success: 0
11578 * Failure: -EINVAL (if stream configuration is invalid)
11579 * -ENODEV (fatal error)
11580 *==========================================================================*/
11581
11582int QCamera3HardwareInterface::configure_streams(
11583 const struct camera3_device *device,
11584 camera3_stream_configuration_t *stream_list)
11585{
11586 LOGD("E");
11587 QCamera3HardwareInterface *hw =
11588 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11589 if (!hw) {
11590 LOGE("NULL camera device");
11591 return -ENODEV;
11592 }
11593 int rc = hw->configureStreams(stream_list);
11594 LOGD("X");
11595 return rc;
11596}
11597
11598/*===========================================================================
11599 * FUNCTION : construct_default_request_settings
11600 *
11601 * DESCRIPTION: Configure a settings buffer to meet the required use case
11602 *
11603 * PARAMETERS :
11604 *
11605 *
11606 * RETURN : Success: Return valid metadata
11607 * Failure: Return NULL
11608 *==========================================================================*/
11609const camera_metadata_t* QCamera3HardwareInterface::
11610 construct_default_request_settings(const struct camera3_device *device,
11611 int type)
11612{
11613
11614 LOGD("E");
11615 camera_metadata_t* fwk_metadata = NULL;
11616 QCamera3HardwareInterface *hw =
11617 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11618 if (!hw) {
11619 LOGE("NULL camera device");
11620 return NULL;
11621 }
11622
11623 fwk_metadata = hw->translateCapabilityToMetadata(type);
11624
11625 LOGD("X");
11626 return fwk_metadata;
11627}
11628
11629/*===========================================================================
11630 * FUNCTION : process_capture_request
11631 *
11632 * DESCRIPTION:
11633 *
11634 * PARAMETERS :
11635 *
11636 *
11637 * RETURN :
11638 *==========================================================================*/
11639int QCamera3HardwareInterface::process_capture_request(
11640 const struct camera3_device *device,
11641 camera3_capture_request_t *request)
11642{
11643 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011644 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011645 QCamera3HardwareInterface *hw =
11646 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11647 if (!hw) {
11648 LOGE("NULL camera device");
11649 return -EINVAL;
11650 }
11651
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011652 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011653 LOGD("X");
11654 return rc;
11655}
11656
11657/*===========================================================================
11658 * FUNCTION : dump
11659 *
11660 * DESCRIPTION:
11661 *
11662 * PARAMETERS :
11663 *
11664 *
11665 * RETURN :
11666 *==========================================================================*/
11667
11668void QCamera3HardwareInterface::dump(
11669 const struct camera3_device *device, int fd)
11670{
11671 /* Log level property is read when "adb shell dumpsys media.camera" is
11672 called so that the log level can be controlled without restarting
11673 the media server */
11674 getLogLevel();
11675
11676 LOGD("E");
11677 QCamera3HardwareInterface *hw =
11678 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11679 if (!hw) {
11680 LOGE("NULL camera device");
11681 return;
11682 }
11683
11684 hw->dump(fd);
11685 LOGD("X");
11686 return;
11687}
11688
11689/*===========================================================================
11690 * FUNCTION : flush
11691 *
11692 * DESCRIPTION:
11693 *
11694 * PARAMETERS :
11695 *
11696 *
11697 * RETURN :
11698 *==========================================================================*/
11699
11700int QCamera3HardwareInterface::flush(
11701 const struct camera3_device *device)
11702{
11703 int rc;
11704 LOGD("E");
11705 QCamera3HardwareInterface *hw =
11706 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11707 if (!hw) {
11708 LOGE("NULL camera device");
11709 return -EINVAL;
11710 }
11711
11712 pthread_mutex_lock(&hw->mMutex);
11713 // Validate current state
11714 switch (hw->mState) {
11715 case STARTED:
11716 /* valid state */
11717 break;
11718
11719 case ERROR:
11720 pthread_mutex_unlock(&hw->mMutex);
11721 hw->handleCameraDeviceError();
11722 return -ENODEV;
11723
11724 default:
11725 LOGI("Flush returned during state %d", hw->mState);
11726 pthread_mutex_unlock(&hw->mMutex);
11727 return 0;
11728 }
11729 pthread_mutex_unlock(&hw->mMutex);
11730
11731 rc = hw->flush(true /* restart channels */ );
11732 LOGD("X");
11733 return rc;
11734}
11735
11736/*===========================================================================
11737 * FUNCTION : close_camera_device
11738 *
11739 * DESCRIPTION:
11740 *
11741 * PARAMETERS :
11742 *
11743 *
11744 * RETURN :
11745 *==========================================================================*/
11746int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11747{
11748 int ret = NO_ERROR;
11749 QCamera3HardwareInterface *hw =
11750 reinterpret_cast<QCamera3HardwareInterface *>(
11751 reinterpret_cast<camera3_device_t *>(device)->priv);
11752 if (!hw) {
11753 LOGE("NULL camera device");
11754 return BAD_VALUE;
11755 }
11756
11757 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11758 delete hw;
11759 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011760 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011761 return ret;
11762}
11763
11764/*===========================================================================
11765 * FUNCTION : getWaveletDenoiseProcessPlate
11766 *
11767 * DESCRIPTION: query wavelet denoise process plate
11768 *
11769 * PARAMETERS : None
11770 *
11771 * RETURN : WNR prcocess plate value
11772 *==========================================================================*/
11773cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11774{
11775 char prop[PROPERTY_VALUE_MAX];
11776 memset(prop, 0, sizeof(prop));
11777 property_get("persist.denoise.process.plates", prop, "0");
11778 int processPlate = atoi(prop);
11779 switch(processPlate) {
11780 case 0:
11781 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11782 case 1:
11783 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11784 case 2:
11785 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11786 case 3:
11787 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11788 default:
11789 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11790 }
11791}
11792
11793
11794/*===========================================================================
11795 * FUNCTION : getTemporalDenoiseProcessPlate
11796 *
11797 * DESCRIPTION: query temporal denoise process plate
11798 *
11799 * PARAMETERS : None
11800 *
11801 * RETURN : TNR prcocess plate value
11802 *==========================================================================*/
11803cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11804{
11805 char prop[PROPERTY_VALUE_MAX];
11806 memset(prop, 0, sizeof(prop));
11807 property_get("persist.tnr.process.plates", prop, "0");
11808 int processPlate = atoi(prop);
11809 switch(processPlate) {
11810 case 0:
11811 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11812 case 1:
11813 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11814 case 2:
11815 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11816 case 3:
11817 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11818 default:
11819 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11820 }
11821}
11822
11823
11824/*===========================================================================
11825 * FUNCTION : extractSceneMode
11826 *
11827 * DESCRIPTION: Extract scene mode from frameworks set metadata
11828 *
11829 * PARAMETERS :
11830 * @frame_settings: CameraMetadata reference
11831 * @metaMode: ANDROID_CONTORL_MODE
11832 * @hal_metadata: hal metadata structure
11833 *
11834 * RETURN : None
11835 *==========================================================================*/
11836int32_t QCamera3HardwareInterface::extractSceneMode(
11837 const CameraMetadata &frame_settings, uint8_t metaMode,
11838 metadata_buffer_t *hal_metadata)
11839{
11840 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011841 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11842
11843 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
11844 LOGD("Ignoring control mode OFF_KEEP_STATE");
11845 return NO_ERROR;
11846 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011847
11848 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11849 camera_metadata_ro_entry entry =
11850 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11851 if (0 == entry.count)
11852 return rc;
11853
11854 uint8_t fwk_sceneMode = entry.data.u8[0];
11855
11856 int val = lookupHalName(SCENE_MODES_MAP,
11857 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11858 fwk_sceneMode);
11859 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011860 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070011861 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070011862 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011863 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011864
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011865 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
11866 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
11867 }
11868
11869 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
11870 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011871 cam_hdr_param_t hdr_params;
11872 hdr_params.hdr_enable = 1;
11873 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11874 hdr_params.hdr_need_1x = false;
11875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11876 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11877 rc = BAD_VALUE;
11878 }
11879 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011880
Thierry Strudel3d639192016-09-09 11:52:26 -070011881 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11882 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11883 rc = BAD_VALUE;
11884 }
11885 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011886
11887 if (mForceHdrSnapshot) {
11888 cam_hdr_param_t hdr_params;
11889 hdr_params.hdr_enable = 1;
11890 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11891 hdr_params.hdr_need_1x = false;
11892 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11893 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11894 rc = BAD_VALUE;
11895 }
11896 }
11897
Thierry Strudel3d639192016-09-09 11:52:26 -070011898 return rc;
11899}
11900
11901/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011902 * FUNCTION : setVideoHdrMode
11903 *
11904 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11905 *
11906 * PARAMETERS :
11907 * @hal_metadata: hal metadata structure
11908 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11909 *
11910 * RETURN : None
11911 *==========================================================================*/
11912int32_t QCamera3HardwareInterface::setVideoHdrMode(
11913 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
11914{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011915 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
11916 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
11917 }
11918
11919 LOGE("Invalid Video HDR mode %d!", vhdr);
11920 return BAD_VALUE;
11921}
11922
11923/*===========================================================================
11924 * FUNCTION : setSensorHDR
11925 *
11926 * DESCRIPTION: Enable/disable sensor HDR.
11927 *
11928 * PARAMETERS :
11929 * @hal_metadata: hal metadata structure
11930 * @enable: boolean whether to enable/disable sensor HDR
11931 *
11932 * RETURN : None
11933 *==========================================================================*/
11934int32_t QCamera3HardwareInterface::setSensorHDR(
11935 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
11936{
Thierry Strudel04e026f2016-10-10 11:27:36 -070011937 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011938 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
11939
11940 if (enable) {
11941 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
11942 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
11943 #ifdef _LE_CAMERA_
11944 //Default to staggered HDR for IOT
11945 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
11946 #else
11947 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
11948 #endif
11949 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
11950 }
11951
11952 bool isSupported = false;
11953 switch (sensor_hdr) {
11954 case CAM_SENSOR_HDR_IN_SENSOR:
11955 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
11956 CAM_QCOM_FEATURE_SENSOR_HDR) {
11957 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011958 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070011959 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011960 break;
11961 case CAM_SENSOR_HDR_ZIGZAG:
11962 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
11963 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
11964 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011965 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070011966 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011967 break;
11968 case CAM_SENSOR_HDR_STAGGERED:
11969 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
11970 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
11971 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070011972 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070011973 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080011974 break;
11975 case CAM_SENSOR_HDR_OFF:
11976 isSupported = true;
11977 LOGD("Turning off sensor HDR");
11978 break;
11979 default:
11980 LOGE("HDR mode %d not supported", sensor_hdr);
11981 rc = BAD_VALUE;
11982 break;
11983 }
11984
11985 if(isSupported) {
11986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11987 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
11988 rc = BAD_VALUE;
11989 } else {
11990 if(!isVideoHdrEnable)
11991 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011992 }
11993 }
11994 return rc;
11995}
11996
11997/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011998 * FUNCTION : needRotationReprocess
11999 *
12000 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12001 *
12002 * PARAMETERS : none
12003 *
12004 * RETURN : true: needed
12005 * false: no need
12006 *==========================================================================*/
12007bool QCamera3HardwareInterface::needRotationReprocess()
12008{
12009 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12010 // current rotation is not zero, and pp has the capability to process rotation
12011 LOGH("need do reprocess for rotation");
12012 return true;
12013 }
12014
12015 return false;
12016}
12017
12018/*===========================================================================
12019 * FUNCTION : needReprocess
12020 *
12021 * DESCRIPTION: if reprocess in needed
12022 *
12023 * PARAMETERS : none
12024 *
12025 * RETURN : true: needed
12026 * false: no need
12027 *==========================================================================*/
12028bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12029{
12030 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12031 // TODO: add for ZSL HDR later
12032 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12033 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12034 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12035 return true;
12036 } else {
12037 LOGH("already post processed frame");
12038 return false;
12039 }
12040 }
12041 return needRotationReprocess();
12042}
12043
12044/*===========================================================================
12045 * FUNCTION : needJpegExifRotation
12046 *
12047 * DESCRIPTION: if rotation from jpeg is needed
12048 *
12049 * PARAMETERS : none
12050 *
12051 * RETURN : true: needed
12052 * false: no need
12053 *==========================================================================*/
12054bool QCamera3HardwareInterface::needJpegExifRotation()
12055{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012056 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012057 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12058 LOGD("Need use Jpeg EXIF Rotation");
12059 return true;
12060 }
12061 return false;
12062}
12063
12064/*===========================================================================
12065 * FUNCTION : addOfflineReprocChannel
12066 *
12067 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12068 * coming from input channel
12069 *
12070 * PARAMETERS :
12071 * @config : reprocess configuration
12072 * @inputChHandle : pointer to the input (source) channel
12073 *
12074 *
12075 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12076 *==========================================================================*/
12077QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12078 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12079{
12080 int32_t rc = NO_ERROR;
12081 QCamera3ReprocessChannel *pChannel = NULL;
12082
12083 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012084 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12085 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012086 if (NULL == pChannel) {
12087 LOGE("no mem for reprocess channel");
12088 return NULL;
12089 }
12090
12091 rc = pChannel->initialize(IS_TYPE_NONE);
12092 if (rc != NO_ERROR) {
12093 LOGE("init reprocess channel failed, ret = %d", rc);
12094 delete pChannel;
12095 return NULL;
12096 }
12097
12098 // pp feature config
12099 cam_pp_feature_config_t pp_config;
12100 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12101
12102 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12103 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12104 & CAM_QCOM_FEATURE_DSDN) {
12105 //Use CPP CDS incase h/w supports it.
12106 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12107 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12108 }
12109 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12110 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12111 }
12112
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012113 if (config.hdr_param.hdr_enable) {
12114 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12115 pp_config.hdr_param = config.hdr_param;
12116 }
12117
12118 if (mForceHdrSnapshot) {
12119 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12120 pp_config.hdr_param.hdr_enable = 1;
12121 pp_config.hdr_param.hdr_need_1x = 0;
12122 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12123 }
12124
Thierry Strudel3d639192016-09-09 11:52:26 -070012125 rc = pChannel->addReprocStreamsFromSource(pp_config,
12126 config,
12127 IS_TYPE_NONE,
12128 mMetadataChannel);
12129
12130 if (rc != NO_ERROR) {
12131 delete pChannel;
12132 return NULL;
12133 }
12134 return pChannel;
12135}
12136
12137/*===========================================================================
12138 * FUNCTION : getMobicatMask
12139 *
12140 * DESCRIPTION: returns mobicat mask
12141 *
12142 * PARAMETERS : none
12143 *
12144 * RETURN : mobicat mask
12145 *
12146 *==========================================================================*/
12147uint8_t QCamera3HardwareInterface::getMobicatMask()
12148{
12149 return m_MobicatMask;
12150}
12151
12152/*===========================================================================
12153 * FUNCTION : setMobicat
12154 *
12155 * DESCRIPTION: set Mobicat on/off.
12156 *
12157 * PARAMETERS :
12158 * @params : none
12159 *
12160 * RETURN : int32_t type of status
12161 * NO_ERROR -- success
12162 * none-zero failure code
12163 *==========================================================================*/
12164int32_t QCamera3HardwareInterface::setMobicat()
12165{
12166 char value [PROPERTY_VALUE_MAX];
12167 property_get("persist.camera.mobicat", value, "0");
12168 int32_t ret = NO_ERROR;
12169 uint8_t enableMobi = (uint8_t)atoi(value);
12170
12171 if (enableMobi) {
12172 tune_cmd_t tune_cmd;
12173 tune_cmd.type = SET_RELOAD_CHROMATIX;
12174 tune_cmd.module = MODULE_ALL;
12175 tune_cmd.value = TRUE;
12176 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12177 CAM_INTF_PARM_SET_VFE_COMMAND,
12178 tune_cmd);
12179
12180 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12181 CAM_INTF_PARM_SET_PP_COMMAND,
12182 tune_cmd);
12183 }
12184 m_MobicatMask = enableMobi;
12185
12186 return ret;
12187}
12188
12189/*===========================================================================
12190* FUNCTION : getLogLevel
12191*
12192* DESCRIPTION: Reads the log level property into a variable
12193*
12194* PARAMETERS :
12195* None
12196*
12197* RETURN :
12198* None
12199*==========================================================================*/
12200void QCamera3HardwareInterface::getLogLevel()
12201{
12202 char prop[PROPERTY_VALUE_MAX];
12203 uint32_t globalLogLevel = 0;
12204
12205 property_get("persist.camera.hal.debug", prop, "0");
12206 int val = atoi(prop);
12207 if (0 <= val) {
12208 gCamHal3LogLevel = (uint32_t)val;
12209 }
12210
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012211 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012212 gKpiDebugLevel = atoi(prop);
12213
12214 property_get("persist.camera.global.debug", prop, "0");
12215 val = atoi(prop);
12216 if (0 <= val) {
12217 globalLogLevel = (uint32_t)val;
12218 }
12219
12220 /* Highest log level among hal.logs and global.logs is selected */
12221 if (gCamHal3LogLevel < globalLogLevel)
12222 gCamHal3LogLevel = globalLogLevel;
12223
12224 return;
12225}
12226
12227/*===========================================================================
12228 * FUNCTION : validateStreamRotations
12229 *
12230 * DESCRIPTION: Check if the rotations requested are supported
12231 *
12232 * PARAMETERS :
12233 * @stream_list : streams to be configured
12234 *
12235 * RETURN : NO_ERROR on success
12236 * -EINVAL on failure
12237 *
12238 *==========================================================================*/
12239int QCamera3HardwareInterface::validateStreamRotations(
12240 camera3_stream_configuration_t *streamList)
12241{
12242 int rc = NO_ERROR;
12243
12244 /*
12245 * Loop through all streams requested in configuration
12246 * Check if unsupported rotations have been requested on any of them
12247 */
12248 for (size_t j = 0; j < streamList->num_streams; j++){
12249 camera3_stream_t *newStream = streamList->streams[j];
12250
12251 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12252 bool isImplDef = (newStream->format ==
12253 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12254 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12255 isImplDef);
12256
12257 if (isRotated && (!isImplDef || isZsl)) {
12258 LOGE("Error: Unsupported rotation of %d requested for stream"
12259 "type:%d and stream format:%d",
12260 newStream->rotation, newStream->stream_type,
12261 newStream->format);
12262 rc = -EINVAL;
12263 break;
12264 }
12265 }
12266
12267 return rc;
12268}
12269
12270/*===========================================================================
12271* FUNCTION : getFlashInfo
12272*
12273* DESCRIPTION: Retrieve information about whether the device has a flash.
12274*
12275* PARAMETERS :
12276* @cameraId : Camera id to query
12277* @hasFlash : Boolean indicating whether there is a flash device
12278* associated with given camera
12279* @flashNode : If a flash device exists, this will be its device node.
12280*
12281* RETURN :
12282* None
12283*==========================================================================*/
12284void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12285 bool& hasFlash,
12286 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12287{
12288 cam_capability_t* camCapability = gCamCapability[cameraId];
12289 if (NULL == camCapability) {
12290 hasFlash = false;
12291 flashNode[0] = '\0';
12292 } else {
12293 hasFlash = camCapability->flash_available;
12294 strlcpy(flashNode,
12295 (char*)camCapability->flash_dev_name,
12296 QCAMERA_MAX_FILEPATH_LENGTH);
12297 }
12298}
12299
12300/*===========================================================================
12301* FUNCTION : getEepromVersionInfo
12302*
12303* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12304*
12305* PARAMETERS : None
12306*
12307* RETURN : string describing EEPROM version
12308* "\0" if no such info available
12309*==========================================================================*/
12310const char *QCamera3HardwareInterface::getEepromVersionInfo()
12311{
12312 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
12313}
12314
12315/*===========================================================================
12316* FUNCTION : getLdafCalib
12317*
12318* DESCRIPTION: Retrieve Laser AF calibration data
12319*
12320* PARAMETERS : None
12321*
12322* RETURN : Two uint32_t describing laser AF calibration data
12323* NULL if none is available.
12324*==========================================================================*/
12325const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12326{
12327 if (mLdafCalibExist) {
12328 return &mLdafCalib[0];
12329 } else {
12330 return NULL;
12331 }
12332}
12333
12334/*===========================================================================
12335 * FUNCTION : dynamicUpdateMetaStreamInfo
12336 *
12337 * DESCRIPTION: This function:
12338 * (1) stops all the channels
12339 * (2) returns error on pending requests and buffers
12340 * (3) sends metastream_info in setparams
12341 * (4) starts all channels
12342 * This is useful when sensor has to be restarted to apply any
12343 * settings such as frame rate from a different sensor mode
12344 *
12345 * PARAMETERS : None
12346 *
12347 * RETURN : NO_ERROR on success
12348 * Error codes on failure
12349 *
12350 *==========================================================================*/
12351int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12352{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012353 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012354 int rc = NO_ERROR;
12355
12356 LOGD("E");
12357
12358 rc = stopAllChannels();
12359 if (rc < 0) {
12360 LOGE("stopAllChannels failed");
12361 return rc;
12362 }
12363
12364 rc = notifyErrorForPendingRequests();
12365 if (rc < 0) {
12366 LOGE("notifyErrorForPendingRequests failed");
12367 return rc;
12368 }
12369
12370 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12371 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12372 "Format:%d",
12373 mStreamConfigInfo.type[i],
12374 mStreamConfigInfo.stream_sizes[i].width,
12375 mStreamConfigInfo.stream_sizes[i].height,
12376 mStreamConfigInfo.postprocess_mask[i],
12377 mStreamConfigInfo.format[i]);
12378 }
12379
12380 /* Send meta stream info once again so that ISP can start */
12381 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12382 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12383 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12384 mParameters);
12385 if (rc < 0) {
12386 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12387 }
12388
12389 rc = startAllChannels();
12390 if (rc < 0) {
12391 LOGE("startAllChannels failed");
12392 return rc;
12393 }
12394
12395 LOGD("X");
12396 return rc;
12397}
12398
12399/*===========================================================================
12400 * FUNCTION : stopAllChannels
12401 *
12402 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12403 *
12404 * PARAMETERS : None
12405 *
12406 * RETURN : NO_ERROR on success
12407 * Error codes on failure
12408 *
12409 *==========================================================================*/
12410int32_t QCamera3HardwareInterface::stopAllChannels()
12411{
12412 int32_t rc = NO_ERROR;
12413
12414 LOGD("Stopping all channels");
12415 // Stop the Streams/Channels
12416 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12417 it != mStreamInfo.end(); it++) {
12418 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12419 if (channel) {
12420 channel->stop();
12421 }
12422 (*it)->status = INVALID;
12423 }
12424
12425 if (mSupportChannel) {
12426 mSupportChannel->stop();
12427 }
12428 if (mAnalysisChannel) {
12429 mAnalysisChannel->stop();
12430 }
12431 if (mRawDumpChannel) {
12432 mRawDumpChannel->stop();
12433 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012434 if (mHdrPlusRawSrcChannel) {
12435 mHdrPlusRawSrcChannel->stop();
12436 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012437 if (mMetadataChannel) {
12438 /* If content of mStreamInfo is not 0, there is metadata stream */
12439 mMetadataChannel->stop();
12440 }
12441
12442 LOGD("All channels stopped");
12443 return rc;
12444}
12445
12446/*===========================================================================
12447 * FUNCTION : startAllChannels
12448 *
12449 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12450 *
12451 * PARAMETERS : None
12452 *
12453 * RETURN : NO_ERROR on success
12454 * Error codes on failure
12455 *
12456 *==========================================================================*/
12457int32_t QCamera3HardwareInterface::startAllChannels()
12458{
12459 int32_t rc = NO_ERROR;
12460
12461 LOGD("Start all channels ");
12462 // Start the Streams/Channels
12463 if (mMetadataChannel) {
12464 /* If content of mStreamInfo is not 0, there is metadata stream */
12465 rc = mMetadataChannel->start();
12466 if (rc < 0) {
12467 LOGE("META channel start failed");
12468 return rc;
12469 }
12470 }
12471 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12472 it != mStreamInfo.end(); it++) {
12473 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12474 if (channel) {
12475 rc = channel->start();
12476 if (rc < 0) {
12477 LOGE("channel start failed");
12478 return rc;
12479 }
12480 }
12481 }
12482 if (mAnalysisChannel) {
12483 mAnalysisChannel->start();
12484 }
12485 if (mSupportChannel) {
12486 rc = mSupportChannel->start();
12487 if (rc < 0) {
12488 LOGE("Support channel start failed");
12489 return rc;
12490 }
12491 }
12492 if (mRawDumpChannel) {
12493 rc = mRawDumpChannel->start();
12494 if (rc < 0) {
12495 LOGE("RAW dump channel start failed");
12496 return rc;
12497 }
12498 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012499 if (mHdrPlusRawSrcChannel) {
12500 rc = mHdrPlusRawSrcChannel->start();
12501 if (rc < 0) {
12502 LOGE("HDR+ RAW channel start failed");
12503 return rc;
12504 }
12505 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012506
12507 LOGD("All channels started");
12508 return rc;
12509}
12510
12511/*===========================================================================
12512 * FUNCTION : notifyErrorForPendingRequests
12513 *
12514 * DESCRIPTION: This function sends error for all the pending requests/buffers
12515 *
12516 * PARAMETERS : None
12517 *
12518 * RETURN : Error codes
12519 * NO_ERROR on success
12520 *
12521 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the oldest frame number still waiting on a capture result.
    // Requests older than it have had metadata delivered already, so only
    // their buffers need ERROR_BUFFER; the rest get a full ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
            frameNum);

    // Walk every pending-buffers record; erase() drives the iteration, so no
    // increment in the for-statement.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): on toolchains where operator new[] throws, this
            // NULL check is dead code; kept for legacy/-fno-exceptions builds.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One ERROR_BUFFER notify per pending buffer; the buffer itself is
            // returned with CAMERA3_BUFFER_STATUS_ERROR in the capture result.
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // NOTE(review): input_buffer comes from the head of
            // mPendingRequestsList, assuming it matches this req's frame —
            // verify the lists stay in lock-step at the call sites.
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            // Drop the corresponding entry from the pending-requests list too.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
12658
12659bool QCamera3HardwareInterface::isOnEncoder(
12660 const cam_dimension_t max_viewfinder_size,
12661 uint32_t width, uint32_t height)
12662{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012663 return ((width > (uint32_t)max_viewfinder_size.width) ||
12664 (height > (uint32_t)max_viewfinder_size.height) ||
12665 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12666 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012667}
12668
12669/*===========================================================================
12670 * FUNCTION : setBundleInfo
12671 *
12672 * DESCRIPTION: Set bundle info for all streams that are bundle.
12673 *
12674 * PARAMETERS : None
12675 *
12676 * RETURN : NO_ERROR on success
12677 * Error codes on failure
12678 *==========================================================================*/
12679int32_t QCamera3HardwareInterface::setBundleInfo()
12680{
12681 int32_t rc = NO_ERROR;
12682
12683 if (mChannelHandle) {
12684 cam_bundle_config_t bundleInfo;
12685 memset(&bundleInfo, 0, sizeof(bundleInfo));
12686 rc = mCameraHandle->ops->get_bundle_info(
12687 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12688 if (rc != NO_ERROR) {
12689 LOGE("get_bundle_info failed");
12690 return rc;
12691 }
12692 if (mAnalysisChannel) {
12693 mAnalysisChannel->setBundleInfo(bundleInfo);
12694 }
12695 if (mSupportChannel) {
12696 mSupportChannel->setBundleInfo(bundleInfo);
12697 }
12698 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12699 it != mStreamInfo.end(); it++) {
12700 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12701 channel->setBundleInfo(bundleInfo);
12702 }
12703 if (mRawDumpChannel) {
12704 mRawDumpChannel->setBundleInfo(bundleInfo);
12705 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012706 if (mHdrPlusRawSrcChannel) {
12707 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
12708 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012709 }
12710
12711 return rc;
12712}
12713
12714/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012715 * FUNCTION : setInstantAEC
12716 *
12717 * DESCRIPTION: Set Instant AEC related params.
12718 *
12719 * PARAMETERS :
12720 * @meta: CameraMetadata reference
12721 *
12722 * RETURN : NO_ERROR on success
12723 * Error codes on failure
12724 *==========================================================================*/
12725int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12726{
12727 int32_t rc = NO_ERROR;
12728 uint8_t val = 0;
12729 char prop[PROPERTY_VALUE_MAX];
12730
12731 // First try to configure instant AEC from framework metadata
12732 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12733 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12734 }
12735
12736 // If framework did not set this value, try to read from set prop.
12737 if (val == 0) {
12738 memset(prop, 0, sizeof(prop));
12739 property_get("persist.camera.instant.aec", prop, "0");
12740 val = (uint8_t)atoi(prop);
12741 }
12742
12743 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12744 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12745 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12746 mInstantAEC = val;
12747 mInstantAECSettledFrameNumber = 0;
12748 mInstantAecFrameIdxCount = 0;
12749 LOGH("instantAEC value set %d",val);
12750 if (mInstantAEC) {
12751 memset(prop, 0, sizeof(prop));
12752 property_get("persist.camera.ae.instant.bound", prop, "10");
12753 int32_t aec_frame_skip_cnt = atoi(prop);
12754 if (aec_frame_skip_cnt >= 0) {
12755 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12756 } else {
12757 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12758 rc = BAD_VALUE;
12759 }
12760 }
12761 } else {
12762 LOGE("Bad instant aec value set %d", val);
12763 rc = BAD_VALUE;
12764 }
12765 return rc;
12766}
12767
12768/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012769 * FUNCTION : get_num_overall_buffers
12770 *
12771 * DESCRIPTION: Estimate number of pending buffers across all requests.
12772 *
12773 * PARAMETERS : None
12774 *
12775 * RETURN : Number of overall pending buffers
12776 *
12777 *==========================================================================*/
12778uint32_t PendingBuffersMap::get_num_overall_buffers()
12779{
12780 uint32_t sum_buffers = 0;
12781 for (auto &req : mPendingBuffersInRequest) {
12782 sum_buffers += req.mPendingBufferList.size();
12783 }
12784 return sum_buffers;
12785}
12786
12787/*===========================================================================
12788 * FUNCTION : removeBuf
12789 *
12790 * DESCRIPTION: Remove a matching buffer from tracker.
12791 *
12792 * PARAMETERS : @buffer: image buffer for the callback
12793 *
12794 * RETURN : None
12795 *
12796 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Linear scan over all requests and their buffer lists for the first
    // matching handle; at most one entry is removed per call.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // erase() returns the next element; safe because we break out
                // of both loops immediately and never touch k or req again.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
12823
12824/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012825 * FUNCTION : getBufErrStatus
12826 *
12827 * DESCRIPTION: get buffer error status
12828 *
12829 * PARAMETERS : @buffer: buffer handle
12830 *
12831 * RETURN : Error status
12832 *
12833 *==========================================================================*/
12834int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12835{
12836 for (auto& req : mPendingBuffersInRequest) {
12837 for (auto& k : req.mPendingBufferList) {
12838 if (k.buffer == buffer)
12839 return k.bufStatus;
12840 }
12841 }
12842 return CAMERA3_BUFFER_STATUS_OK;
12843}
12844
12845/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012846 * FUNCTION : setPAAFSupport
12847 *
12848 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12849 * feature mask according to stream type and filter
12850 * arrangement
12851 *
12852 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12853 * @stream_type: stream type
12854 * @filter_arrangement: filter arrangement
12855 *
12856 * RETURN : None
12857 *==========================================================================*/
12858void QCamera3HardwareInterface::setPAAFSupport(
12859 cam_feature_mask_t& feature_mask,
12860 cam_stream_type_t stream_type,
12861 cam_color_filter_arrangement_t filter_arrangement)
12862{
12863 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12864 feature_mask, stream_type, filter_arrangement);
12865
12866 switch (filter_arrangement) {
12867 case CAM_FILTER_ARRANGEMENT_RGGB:
12868 case CAM_FILTER_ARRANGEMENT_GRBG:
12869 case CAM_FILTER_ARRANGEMENT_GBRG:
12870 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012871 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12872 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012873 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12874 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12875 }
12876 break;
12877 case CAM_FILTER_ARRANGEMENT_Y:
12878 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12879 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12880 }
12881 break;
12882 default:
12883 break;
12884 }
12885}
12886
12887/*===========================================================================
12888* FUNCTION : getSensorMountAngle
12889*
12890* DESCRIPTION: Retrieve sensor mount angle
12891*
12892* PARAMETERS : None
12893*
12894* RETURN : sensor mount angle in uint32_t
12895*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    // Read straight from the cached static capability table for this camera.
    return gCamCapability[mCameraId]->sensor_mount_angle;
}
12900
12901/*===========================================================================
12902* FUNCTION : getRelatedCalibrationData
12903*
12904* DESCRIPTION: Retrieve related system calibration data
12905*
12906* PARAMETERS : None
12907*
12908* RETURN : Pointer of related system calibration data
12909*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    // Pointer into the static capability table; caller must not free it.
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012915
12916/*===========================================================================
12917 * FUNCTION : is60HzZone
12918 *
12919 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
12920 *
12921 * PARAMETERS : None
12922 *
12923 * RETURN : True if in 60Hz zone, False otherwise
12924 *==========================================================================*/
12925bool QCamera3HardwareInterface::is60HzZone()
12926{
12927 time_t t = time(NULL);
12928 struct tm lt;
12929
12930 struct tm* r = localtime_r(&t, &lt);
12931
12932 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
12933 return true;
12934 else
12935 return false;
12936}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070012937
12938/*===========================================================================
12939 * FUNCTION : adjustBlackLevelForCFA
12940 *
12941 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
12942 * of bayer CFA (Color Filter Array).
12943 *
12944 * PARAMETERS : @input: black level pattern in the order of RGGB
12945 * @output: black level pattern in the order of CFA
12946 * @color_arrangement: CFA color arrangement
12947 *
12948 * RETURN : None
12949 *==========================================================================*/
12950template<typename T>
12951void QCamera3HardwareInterface::adjustBlackLevelForCFA(
12952 T input[BLACK_LEVEL_PATTERN_CNT],
12953 T output[BLACK_LEVEL_PATTERN_CNT],
12954 cam_color_filter_arrangement_t color_arrangement)
12955{
12956 switch (color_arrangement) {
12957 case CAM_FILTER_ARRANGEMENT_GRBG:
12958 output[0] = input[1];
12959 output[1] = input[0];
12960 output[2] = input[3];
12961 output[3] = input[2];
12962 break;
12963 case CAM_FILTER_ARRANGEMENT_GBRG:
12964 output[0] = input[2];
12965 output[1] = input[3];
12966 output[2] = input[0];
12967 output[3] = input[1];
12968 break;
12969 case CAM_FILTER_ARRANGEMENT_BGGR:
12970 output[0] = input[3];
12971 output[1] = input[2];
12972 output[2] = input[1];
12973 output[3] = input[0];
12974 break;
12975 case CAM_FILTER_ARRANGEMENT_RGGB:
12976 output[0] = input[0];
12977 output[1] = input[1];
12978 output[2] = input[2];
12979 output[3] = input[3];
12980 break;
12981 default:
12982 LOGE("Invalid color arrangement to derive dynamic blacklevel");
12983 break;
12984 }
12985}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012986
/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Copy the JPEG/GPS/capture-intent settings of the original
 *              HDR+ request into the result metadata, overwriting whatever
 *              the (ZSL-sourced) result carried for those tags.
 *
 * PARAMETERS : @resultMetadata: result metadata to update in place
 *              @settings: HAL metadata captured with the HDR+ request
 *
 * RETURN     : None (logs and returns early if settings is null)
 *==========================================================================*/
void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
        CameraMetadata &resultMetadata,
        std::shared_ptr<metadata_buffer_t> settings)
{
    if (settings == nullptr) {
        ALOGE("%s: settings is nullptr.", __FUNCTION__);
        return;
    }

    // Each block below only fires if the tag was present in the request.
    IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
    }

    IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
        String8 str((const char *)gps_methods);
        resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
    }

    IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
        resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
    }

    IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
        resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
    }

    // HAL stores qualities as uint32; framework tags are uint8.
    IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
        uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
        resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
    }

    IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
        uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
    }

    IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
        int32_t fwk_thumb_size[2];
        fwk_thumb_size[0] = thumb_size->width;
        fwk_thumb_size[1] = thumb_size->height;
        resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
    }

    IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
        uint8_t fwk_intent = intent[0];
        resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
    }
}
13035
/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequest
 *
 * DESCRIPTION: Decide whether a capture request qualifies for HDR+ and, if
 *              so, submit it to the HDR+ service. A request qualifies only
 *              when noise reduction and edge modes are HIGH_QUALITY and the
 *              single output buffer is a JPEG (BLOB) stream.
 *
 * PARAMETERS : @hdrPlusRequest: out-param filled with the YUV buffer and
 *                  framework output buffer on successful submission
 *              @request: framework capture request being evaluated
 *              @metadata: framework settings for the request
 *
 * RETURN     : true if the request was submitted to the HDR+ service;
 *              false otherwise (caller should process it normally)
 *==========================================================================*/
bool QCamera3HardwareInterface::trySubmittingHdrPlusRequest(HdrPlusPendingRequest *hdrPlusRequest,
        const camera3_capture_request_t &request, const CameraMetadata &metadata)
{
    if (hdrPlusRequest == nullptr) return false;

    // Check noise reduction mode is high quality.
    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
         metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    // Check edge mode is high quality.
    if (!metadata.exists(ANDROID_EDGE_MODE) ||
         metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
        ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
        return false;
    }

    if (request.num_output_buffers != 1 ||
            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
        return false;
    }

    // Get a YUV buffer from pic channel. The HDR+ service fills this buffer;
    // it is later handed back to the pic channel for JPEG encoding.
    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
    if (res != OK) {
        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
            __FUNCTION__, strerror(-res), res);
        return false;
    }

    pbcamera::StreamBuffer buffer;
    buffer.streamId = kPbYuvOutputStreamId;
    buffer.data = yuvBuffer->buffer;
    buffer.dataSize = yuvBuffer->frame_len;

    // The HDR+ request is keyed by the framework frame number so the result
    // callback can be matched back to this request.
    pbcamera::CaptureRequest pbRequest;
    pbRequest.id = request.frame_number;
    pbRequest.outputBuffers.push_back(buffer);

    // Submit an HDR+ capture request to HDR+ service.
    res = mHdrPlusClient->submitCaptureRequest(&pbRequest);
    if (res != OK) {
        ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
            strerror(-res), res);
        return false;
    }

    // NOTE(review): on failure after getYuvBufferForRequest the YUV buffer is
    // not explicitly returned here — presumably reclaimed by the pic channel;
    // confirm against QCamera3PicChannel's buffer accounting.
    hdrPlusRequest->yuvBuffer = yuvBuffer;
    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);

    return true;
}
13094
13095status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked(
13096 const cam_sensor_mode_info_t &sensor_mode_info)
13097{
13098 pbcamera::InputConfiguration inputConfig;
13099 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
13100 status_t res = OK;
13101
13102 // Configure HDR+ client streams.
13103 // Get input config.
13104 if (mHdrPlusRawSrcChannel) {
13105 // HDR+ input buffers will be provided by HAL.
13106 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
13107 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
13108 if (res != OK) {
13109 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
13110 __FUNCTION__, strerror(-res), res);
13111 return res;
13112 }
13113
13114 inputConfig.isSensorInput = false;
13115 } else {
13116 // Sensor MIPI will send data to Easel.
13117 inputConfig.isSensorInput = true;
13118 inputConfig.sensorMode.pixelArrayWidth = sensor_mode_info.pixel_array_size.width;
13119 inputConfig.sensorMode.pixelArrayHeight = sensor_mode_info.pixel_array_size.height;
13120 inputConfig.sensorMode.activeArrayWidth = sensor_mode_info.active_array_size.width;
13121 inputConfig.sensorMode.activeArrayHeight = sensor_mode_info.active_array_size.height;
13122 inputConfig.sensorMode.outputPixelClkHz = sensor_mode_info.op_pixel_clk;
13123 }
13124
13125 // Get output configurations.
13126 // Easel may need to output RAW16 buffers if mRawChannel was created.
13127 if (mRawChannel != nullptr) {
13128 pbcamera::StreamConfiguration outputConfig;
13129 res = fillPbStreamConfig(&outputConfig, kPbRaw16OutputStreamId,
13130 HAL_PIXEL_FORMAT_RAW16, mRawChannel, /*stream index*/0);
13131 if (res != OK) {
13132 LOGE("%s: Failed to get fill stream config for raw stream: %s (%d)",
13133 __FUNCTION__, strerror(-res), res);
13134 return res;
13135 }
13136 outputStreamConfigs.push_back(outputConfig);
13137 }
13138
13139 // Easel may need to output YUV output buffers if mPictureChannel was created.
13140 pbcamera::StreamConfiguration yuvOutputConfig;
13141 if (mPictureChannel != nullptr) {
13142 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
13143 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
13144 if (res != OK) {
13145 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
13146 __FUNCTION__, strerror(-res), res);
13147
13148 return res;
13149 }
13150
13151 outputStreamConfigs.push_back(yuvOutputConfig);
13152 }
13153
13154 // TODO: consider other channels for YUV output buffers.
13155
13156 res = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
13157 if (res != OK) {
13158 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
13159 strerror(-res), res);
13160 return res;
13161 }
13162
13163 return OK;
13164}
13165
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013166void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
13167 const camera_metadata_t &resultMetadata) {
13168 if (result != nullptr) {
13169 if (result->outputBuffers.size() != 1) {
13170 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
13171 result->outputBuffers.size());
13172 return;
13173 }
13174
13175 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
13176 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
13177 result->outputBuffers[0].streamId);
13178 return;
13179 }
13180
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013181 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013182 HdrPlusPendingRequest pendingRequest;
13183 {
13184 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13185 auto req = mHdrPlusPendingRequests.find(result->requestId);
13186 pendingRequest = req->second;
13187 }
13188
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013189 // Update the result metadata with the settings of the HDR+ still capture request because
13190 // the result metadata belongs to a ZSL buffer.
13191 CameraMetadata metadata;
13192 metadata = &resultMetadata;
13193 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
13194 camera_metadata_t* updatedResultMetadata = metadata.release();
13195
13196 QCamera3PicChannel *picChannel =
13197 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
13198
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013199 // Check if dumping HDR+ YUV output is enabled.
13200 char prop[PROPERTY_VALUE_MAX];
13201 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
13202 bool dumpYuvOutput = atoi(prop);
13203
13204 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013205 // Dump yuv buffer to a ppm file.
13206 pbcamera::StreamConfiguration outputConfig;
13207 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
13208 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
13209 if (rc == OK) {
13210 char buf[FILENAME_MAX] = {};
13211 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
13212 result->requestId, result->outputBuffers[0].streamId,
13213 outputConfig.image.width, outputConfig.image.height);
13214
13215 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
13216 } else {
13217 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
13218 __FUNCTION__, strerror(-rc), rc);
13219 }
13220 }
13221
Chien-Yu Chen92724a82017-01-06 11:50:30 -080013222 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
13223 auto halMetadata = std::make_shared<metadata_buffer_t>();
13224 clear_metadata_buffer(halMetadata.get());
13225
13226 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
13227 // encoding.
13228 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
13229 halStreamId, /*minFrameDuration*/0);
13230 if (res == OK) {
13231 // Return the buffer to pic channel for encoding.
13232 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
13233 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
13234 halMetadata);
13235 } else {
13236 // Return the buffer without encoding.
13237 // TODO: This should not happen but we may want to report an error buffer to camera
13238 // service.
13239 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
13240 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
13241 strerror(-res), res);
13242 }
13243
13244 // Send HDR+ metadata to framework.
13245 {
13246 pthread_mutex_lock(&mMutex);
13247
13248 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
13249 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
13250 pthread_mutex_unlock(&mMutex);
13251 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013252
13253 // Remove the HDR+ pending request.
13254 {
13255 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13256 auto req = mHdrPlusPendingRequests.find(result->requestId);
13257 mHdrPlusPendingRequests.erase(req);
13258 }
13259 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013260}
13261
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013262void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
13263 // TODO: Handle HDR+ capture failures and send the failure to framework.
13264 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13265 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
13266
13267 // Return the buffer to pic channel.
13268 QCamera3PicChannel *picChannel =
13269 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
13270 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
13271
13272 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013273}
13274
Thierry Strudel3d639192016-09-09 11:52:26 -070013275}; //end namespace qcamera