/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
29
#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <sync/sync.h>
#include "gralloc_priv.h"

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

extern "C" {
#include "mm_camera_dbg.h"
}
61
62using namespace android;
63
64namespace qcamera {
65
66#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
67
68#define EMPTY_PIPELINE_DELAY 2
69#define PARTIAL_RESULT_COUNT 2
70#define FRAME_SKIP_DELAY 0
71
72#define MAX_VALUE_8BIT ((1<<8)-1)
73#define MAX_VALUE_10BIT ((1<<10)-1)
74#define MAX_VALUE_12BIT ((1<<12)-1)
75
76#define VIDEO_4K_WIDTH 3840
77#define VIDEO_4K_HEIGHT 2160
78
79#define MAX_EIS_WIDTH 1920
80#define MAX_EIS_HEIGHT 1080
81
82#define MAX_RAW_STREAMS 1
83#define MAX_STALLING_STREAMS 1
84#define MAX_PROCESSED_STREAMS 3
85/* Batch mode is enabled only if FPS set is equal to or greater than this */
86#define MIN_FPS_FOR_BATCH_MODE (120)
87#define PREVIEW_FPS_FOR_HFR (30)
88#define DEFAULT_VIDEO_FPS (30.0)
89#define MAX_HFR_BATCH_SIZE (8)
90#define REGIONS_TUPLE_COUNT 5
91#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070092// Set a threshold for detection of missing buffers //seconds
93#define MISSING_REQUEST_BUF_TIMEOUT 3
94#define FLUSH_TIMEOUT 3
95#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
96
97#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
98 CAM_QCOM_FEATURE_CROP |\
99 CAM_QCOM_FEATURE_ROTATION |\
100 CAM_QCOM_FEATURE_SHARPNESS |\
101 CAM_QCOM_FEATURE_SCALE |\
102 CAM_QCOM_FEATURE_CAC |\
103 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700104/* Per configuration size for static metadata length*/
105#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700106
107#define TIMEOUT_NEVER -1
108
Thierry Strudel04e026f2016-10-10 11:27:36 -0700109/* Face landmarks indices */
110#define LEFT_EYE_X 0
111#define LEFT_EYE_Y 1
112#define RIGHT_EYE_X 2
113#define RIGHT_EYE_Y 3
114#define MOUTH_X 4
115#define MOUTH_Y 5
116#define TOTAL_LANDMARK_INDICES 6
117
Thierry Strudel3d639192016-09-09 11:52:26 -0700118cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
119const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
120extern pthread_mutex_t gCamLock;
121volatile uint32_t gCamHal3LogLevel = 1;
122extern uint8_t gNumCameraSessions;
123
124const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
125 {"On", CAM_CDS_MODE_ON},
126 {"Off", CAM_CDS_MODE_OFF},
127 {"Auto",CAM_CDS_MODE_AUTO}
128};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700129const QCamera3HardwareInterface::QCameraMap<
130 camera_metadata_enum_android_video_hdr_mode_t,
131 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
132 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
133 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
134};
135
136
137const QCamera3HardwareInterface::QCameraMap<
138 camera_metadata_enum_android_ir_mode_t,
139 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
140 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
141 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
142 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
143};
Thierry Strudel3d639192016-09-09 11:52:26 -0700144
145const QCamera3HardwareInterface::QCameraMap<
146 camera_metadata_enum_android_control_effect_mode_t,
147 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
148 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
149 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
150 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
151 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
152 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
153 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
154 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
155 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
156 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
157};
158
159const QCamera3HardwareInterface::QCameraMap<
160 camera_metadata_enum_android_control_awb_mode_t,
161 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
162 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
163 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
164 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
165 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
166 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
167 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
168 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
169 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
170 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
171};
172
173const QCamera3HardwareInterface::QCameraMap<
174 camera_metadata_enum_android_control_scene_mode_t,
175 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
176 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
177 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
178 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
179 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
180 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
181 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
182 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
183 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
184 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
185 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
186 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
187 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
188 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
189 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
190 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
191 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE}
192};
193
194const QCamera3HardwareInterface::QCameraMap<
195 camera_metadata_enum_android_control_af_mode_t,
196 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
197 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
198 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
199 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
200 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
201 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
202 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
203 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
204};
205
206const QCamera3HardwareInterface::QCameraMap<
207 camera_metadata_enum_android_color_correction_aberration_mode_t,
208 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
209 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
210 CAM_COLOR_CORRECTION_ABERRATION_OFF },
211 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
212 CAM_COLOR_CORRECTION_ABERRATION_FAST },
213 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
214 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
215};
216
217const QCamera3HardwareInterface::QCameraMap<
218 camera_metadata_enum_android_control_ae_antibanding_mode_t,
219 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
220 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
221 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
222 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
223 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
224};
225
226const QCamera3HardwareInterface::QCameraMap<
227 camera_metadata_enum_android_control_ae_mode_t,
228 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
229 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
230 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
231 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
232 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
233 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
234};
235
236const QCamera3HardwareInterface::QCameraMap<
237 camera_metadata_enum_android_flash_mode_t,
238 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
239 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
240 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
241 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
242};
243
244const QCamera3HardwareInterface::QCameraMap<
245 camera_metadata_enum_android_statistics_face_detect_mode_t,
246 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
247 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
248 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
249 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
250};
251
252const QCamera3HardwareInterface::QCameraMap<
253 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
254 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
255 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
256 CAM_FOCUS_UNCALIBRATED },
257 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
258 CAM_FOCUS_APPROXIMATE },
259 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
260 CAM_FOCUS_CALIBRATED }
261};
262
263const QCamera3HardwareInterface::QCameraMap<
264 camera_metadata_enum_android_lens_state_t,
265 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
266 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
267 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
268};
269
270const int32_t available_thumbnail_sizes[] = {0, 0,
271 176, 144,
272 240, 144,
273 256, 144,
274 240, 160,
275 256, 154,
276 240, 240,
277 320, 240};
278
279const QCamera3HardwareInterface::QCameraMap<
280 camera_metadata_enum_android_sensor_test_pattern_mode_t,
281 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
282 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
283 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
284 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
285 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
286 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
287 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
288};
289
290/* Since there is no mapping for all the options some Android enum are not listed.
291 * Also, the order in this list is important because while mapping from HAL to Android it will
292 * traverse from lower to higher index which means that for HAL values that are map to different
293 * Android values, the traverse logic will select the first one found.
294 */
295const QCamera3HardwareInterface::QCameraMap<
296 camera_metadata_enum_android_sensor_reference_illuminant1_t,
297 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
298 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
299 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
300 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
301 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
302 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
303 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
304 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
305 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
306 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
307 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
308 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
309 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
310 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
311 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
312 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
313 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
314};
315
316const QCamera3HardwareInterface::QCameraMap<
317 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
318 { 60, CAM_HFR_MODE_60FPS},
319 { 90, CAM_HFR_MODE_90FPS},
320 { 120, CAM_HFR_MODE_120FPS},
321 { 150, CAM_HFR_MODE_150FPS},
322 { 180, CAM_HFR_MODE_180FPS},
323 { 210, CAM_HFR_MODE_210FPS},
324 { 240, CAM_HFR_MODE_240FPS},
325 { 480, CAM_HFR_MODE_480FPS},
326};
327
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700328const QCamera3HardwareInterface::QCameraMap<
329 qcamera3_ext_instant_aec_mode_t,
330 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
331 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
332 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
333 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
334};
Thierry Strudel3d639192016-09-09 11:52:26 -0700335camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
336 .initialize = QCamera3HardwareInterface::initialize,
337 .configure_streams = QCamera3HardwareInterface::configure_streams,
338 .register_stream_buffers = NULL,
339 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
340 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
341 .get_metadata_vendor_tag_ops = NULL,
342 .dump = QCamera3HardwareInterface::dump,
343 .flush = QCamera3HardwareInterface::flush,
344 .reserved = {0},
345};
346
347// initialise to some default value
348uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
349
350/*===========================================================================
351 * FUNCTION : QCamera3HardwareInterface
352 *
353 * DESCRIPTION: constructor of QCamera3HardwareInterface
354 *
355 * PARAMETERS :
356 * @cameraId : camera ID
357 *
358 * RETURN : none
359 *==========================================================================*/
360QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
361 const camera_module_callbacks_t *callbacks)
362 : mCameraId(cameraId),
363 mCameraHandle(NULL),
364 mCameraInitialized(false),
365 mCallbackOps(NULL),
366 mMetadataChannel(NULL),
367 mPictureChannel(NULL),
368 mRawChannel(NULL),
369 mSupportChannel(NULL),
370 mAnalysisChannel(NULL),
371 mRawDumpChannel(NULL),
372 mDummyBatchChannel(NULL),
373 m_perfLock(),
374 mCommon(),
375 mChannelHandle(0),
376 mFirstConfiguration(true),
377 mFlush(false),
378 mFlushPerf(false),
379 mParamHeap(NULL),
380 mParameters(NULL),
381 mPrevParameters(NULL),
382 m_bIsVideo(false),
383 m_bIs4KVideo(false),
384 m_bEisSupportedSize(false),
385 m_bEisEnable(false),
386 m_MobicatMask(0),
387 mMinProcessedFrameDuration(0),
388 mMinJpegFrameDuration(0),
389 mMinRawFrameDuration(0),
390 mMetaFrameCount(0U),
391 mUpdateDebugLevel(false),
392 mCallbacks(callbacks),
393 mCaptureIntent(0),
394 mCacMode(0),
395 mBatchSize(0),
396 mToBeQueuedVidBufs(0),
397 mHFRVideoFps(DEFAULT_VIDEO_FPS),
398 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
399 mFirstFrameNumberInBatch(0),
400 mNeedSensorRestart(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700401 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
402 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700403 mInstantAEC(false),
404 mResetInstantAEC(false),
405 mInstantAECSettledFrameNumber(0),
406 mAecSkipDisplayFrameBound(0),
407 mInstantAecFrameIdxCount(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700408 mLdafCalibExist(false),
409 mPowerHintEnabled(false),
410 mLastCustIntentFrmNum(-1),
411 mState(CLOSED),
412 mIsDeviceLinked(false),
413 mIsMainCamera(true),
414 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700415 m_pDualCamCmdHeap(NULL),
416 m_pDualCamCmdPtr(NULL)
Thierry Strudel3d639192016-09-09 11:52:26 -0700417{
418 getLogLevel();
419 m_perfLock.lock_init();
420 mCommon.init(gCamCapability[cameraId]);
421 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700422#ifndef USE_HAL_3_3
423 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
424#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700425 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700426#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700427 mCameraDevice.common.close = close_camera_device;
428 mCameraDevice.ops = &mCameraOps;
429 mCameraDevice.priv = this;
430 gCamCapability[cameraId]->version = CAM_HAL_V3;
431 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
432 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
433 gCamCapability[cameraId]->min_num_pp_bufs = 3;
434
435 pthread_cond_init(&mBuffersCond, NULL);
436
437 pthread_cond_init(&mRequestCond, NULL);
438 mPendingLiveRequest = 0;
439 mCurrentRequestId = -1;
440 pthread_mutex_init(&mMutex, NULL);
441
442 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
443 mDefaultMetadata[i] = NULL;
444
445 // Getting system props of different kinds
446 char prop[PROPERTY_VALUE_MAX];
447 memset(prop, 0, sizeof(prop));
448 property_get("persist.camera.raw.dump", prop, "0");
449 mEnableRawDump = atoi(prop);
450 if (mEnableRawDump)
451 LOGD("Raw dump from Camera HAL enabled");
452
453 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
454 memset(mLdafCalib, 0, sizeof(mLdafCalib));
455
456 memset(prop, 0, sizeof(prop));
457 property_get("persist.camera.tnr.preview", prop, "0");
458 m_bTnrPreview = (uint8_t)atoi(prop);
459
460 memset(prop, 0, sizeof(prop));
461 property_get("persist.camera.tnr.video", prop, "0");
462 m_bTnrVideo = (uint8_t)atoi(prop);
463
464 memset(prop, 0, sizeof(prop));
465 property_get("persist.camera.avtimer.debug", prop, "0");
466 m_debug_avtimer = (uint8_t)atoi(prop);
467
468 //Load and read GPU library.
469 lib_surface_utils = NULL;
470 LINK_get_surface_pixel_alignment = NULL;
471 mSurfaceStridePadding = CAM_PAD_TO_32;
472 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
473 if (lib_surface_utils) {
474 *(void **)&LINK_get_surface_pixel_alignment =
475 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
476 if (LINK_get_surface_pixel_alignment) {
477 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
478 }
479 dlclose(lib_surface_utils);
480 }
481}
482
483/*===========================================================================
484 * FUNCTION : ~QCamera3HardwareInterface
485 *
486 * DESCRIPTION: destructor of QCamera3HardwareInterface
487 *
488 * PARAMETERS : none
489 *
490 * RETURN : none
491 *==========================================================================*/
492QCamera3HardwareInterface::~QCamera3HardwareInterface()
493{
494 LOGD("E");
495
496 /* Turn off current power hint before acquiring perfLock in case they
497 * conflict with each other */
498 disablePowerHint();
499
500 m_perfLock.lock_acq();
501
502 /* We need to stop all streams before deleting any stream */
503 if (mRawDumpChannel) {
504 mRawDumpChannel->stop();
505 }
506
507 // NOTE: 'camera3_stream_t *' objects are already freed at
508 // this stage by the framework
509 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
510 it != mStreamInfo.end(); it++) {
511 QCamera3ProcessingChannel *channel = (*it)->channel;
512 if (channel) {
513 channel->stop();
514 }
515 }
516 if (mSupportChannel)
517 mSupportChannel->stop();
518
519 if (mAnalysisChannel) {
520 mAnalysisChannel->stop();
521 }
522 if (mMetadataChannel) {
523 mMetadataChannel->stop();
524 }
525 if (mChannelHandle) {
526 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
527 mChannelHandle);
528 LOGD("stopping channel %d", mChannelHandle);
529 }
530
531 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
532 it != mStreamInfo.end(); it++) {
533 QCamera3ProcessingChannel *channel = (*it)->channel;
534 if (channel)
535 delete channel;
536 free (*it);
537 }
538 if (mSupportChannel) {
539 delete mSupportChannel;
540 mSupportChannel = NULL;
541 }
542
543 if (mAnalysisChannel) {
544 delete mAnalysisChannel;
545 mAnalysisChannel = NULL;
546 }
547 if (mRawDumpChannel) {
548 delete mRawDumpChannel;
549 mRawDumpChannel = NULL;
550 }
551 if (mDummyBatchChannel) {
552 delete mDummyBatchChannel;
553 mDummyBatchChannel = NULL;
554 }
555
556 mPictureChannel = NULL;
557
558 if (mMetadataChannel) {
559 delete mMetadataChannel;
560 mMetadataChannel = NULL;
561 }
562
563 /* Clean up all channels */
564 if (mCameraInitialized) {
565 if(!mFirstConfiguration){
566 //send the last unconfigure
567 cam_stream_size_info_t stream_config_info;
568 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
569 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
570 stream_config_info.buffer_info.max_buffers =
571 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700572 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700573 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
574 stream_config_info);
575 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
576 if (rc < 0) {
577 LOGE("set_parms failed for unconfigure");
578 }
579 }
580 deinitParameters();
581 }
582
583 if (mChannelHandle) {
584 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
585 mChannelHandle);
586 LOGH("deleting channel %d", mChannelHandle);
587 mChannelHandle = 0;
588 }
589
590 if (mState != CLOSED)
591 closeCamera();
592
593 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
594 req.mPendingBufferList.clear();
595 }
596 mPendingBuffersMap.mPendingBuffersInRequest.clear();
597 mPendingReprocessResultList.clear();
598 for (pendingRequestIterator i = mPendingRequestsList.begin();
599 i != mPendingRequestsList.end();) {
600 i = erasePendingRequest(i);
601 }
602 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
603 if (mDefaultMetadata[i])
604 free_camera_metadata(mDefaultMetadata[i]);
605
606 m_perfLock.lock_rel();
607 m_perfLock.lock_deinit();
608
609 pthread_cond_destroy(&mRequestCond);
610
611 pthread_cond_destroy(&mBuffersCond);
612
613 pthread_mutex_destroy(&mMutex);
614 LOGD("X");
615}
616
617/*===========================================================================
618 * FUNCTION : erasePendingRequest
619 *
620 * DESCRIPTION: function to erase a desired pending request after freeing any
621 * allocated memory
622 *
623 * PARAMETERS :
624 * @i : iterator pointing to pending request to be erased
625 *
626 * RETURN : iterator pointing to the next request
627 *==========================================================================*/
628QCamera3HardwareInterface::pendingRequestIterator
629 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
630{
631 if (i->input_buffer != NULL) {
632 free(i->input_buffer);
633 i->input_buffer = NULL;
634 }
635 if (i->settings != NULL)
636 free_camera_metadata((camera_metadata_t*)i->settings);
637 return mPendingRequestsList.erase(i);
638}
639
640/*===========================================================================
641 * FUNCTION : camEvtHandle
642 *
643 * DESCRIPTION: Function registered to mm-camera-interface to handle events
644 *
645 * PARAMETERS :
646 * @camera_handle : interface layer camera handle
647 * @evt : ptr to event
648 * @user_data : user data ptr
649 *
650 * RETURN : none
651 *==========================================================================*/
652void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
653 mm_camera_event_t *evt,
654 void *user_data)
655{
656 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
657 if (obj && evt) {
658 switch(evt->server_event_type) {
659 case CAM_EVENT_TYPE_DAEMON_DIED:
660 pthread_mutex_lock(&obj->mMutex);
661 obj->mState = ERROR;
662 pthread_mutex_unlock(&obj->mMutex);
663 LOGE("Fatal, camera daemon died");
664 break;
665
666 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
667 LOGD("HAL got request pull from Daemon");
668 pthread_mutex_lock(&obj->mMutex);
669 obj->mWokenUpByDaemon = true;
670 obj->unblockRequestIfNecessary();
671 pthread_mutex_unlock(&obj->mMutex);
672 break;
673
674 default:
675 LOGW("Warning: Unhandled event %d",
676 evt->server_event_type);
677 break;
678 }
679 } else {
680 LOGE("NULL user_data/evt");
681 }
682}
683
684/*===========================================================================
685 * FUNCTION : openCamera
686 *
687 * DESCRIPTION: open camera
688 *
689 * PARAMETERS :
690 * @hw_device : double ptr for camera device struct
691 *
692 * RETURN : int32_t type of status
693 * NO_ERROR -- success
694 * none-zero failure code
695 *==========================================================================*/
696int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
697{
698 int rc = 0;
699 if (mState != CLOSED) {
700 *hw_device = NULL;
701 return PERMISSION_DENIED;
702 }
703
704 m_perfLock.lock_acq();
705 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
706 mCameraId);
707
708 rc = openCamera();
709 if (rc == 0) {
710 *hw_device = &mCameraDevice.common;
711 } else
712 *hw_device = NULL;
713
714 m_perfLock.lock_rel();
715 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
716 mCameraId, rc);
717
718 if (rc == NO_ERROR) {
719 mState = OPENED;
720 }
721 return rc;
722}
723
724/*===========================================================================
725 * FUNCTION : openCamera
726 *
727 * DESCRIPTION: open camera
728 *
729 * PARAMETERS : none
730 *
731 * RETURN : int32_t type of status
732 * NO_ERROR -- success
733 * none-zero failure code
734 *==========================================================================*/
735int QCamera3HardwareInterface::openCamera()
736{
737 int rc = 0;
738 char value[PROPERTY_VALUE_MAX];
739
740 KPI_ATRACE_CALL();
741 if (mCameraHandle) {
742 LOGE("Failure: Camera already opened");
743 return ALREADY_EXISTS;
744 }
745
746 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
747 if (rc < 0) {
748 LOGE("Failed to reserve flash for camera id: %d",
749 mCameraId);
750 return UNKNOWN_ERROR;
751 }
752
753 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
754 if (rc) {
755 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
756 return rc;
757 }
758
759 if (!mCameraHandle) {
760 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
761 return -ENODEV;
762 }
763
764 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
765 camEvtHandle, (void *)this);
766
767 if (rc < 0) {
768 LOGE("Error, failed to register event callback");
769 /* Not closing camera here since it is already handled in destructor */
770 return FAILED_TRANSACTION;
771 }
772
773 mExifParams.debug_params =
774 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
775 if (mExifParams.debug_params) {
776 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
777 } else {
778 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
779 return NO_MEMORY;
780 }
781 mFirstConfiguration = true;
782
783 //Notify display HAL that a camera session is active.
784 //But avoid calling the same during bootup because camera service might open/close
785 //cameras at boot time during its initialization and display service will also internally
786 //wait for camera service to initialize first while calling this display API, resulting in a
787 //deadlock situation. Since boot time camera open/close calls are made only to fetch
788 //capabilities, no need of this display bw optimization.
789 //Use "service.bootanim.exit" property to know boot status.
790 property_get("service.bootanim.exit", value, "0");
791 if (atoi(value) == 1) {
792 pthread_mutex_lock(&gCamLock);
793 if (gNumCameraSessions++ == 0) {
794 setCameraLaunchStatus(true);
795 }
796 pthread_mutex_unlock(&gCamLock);
797 }
798
799 //fill the session id needed while linking dual cam
800 pthread_mutex_lock(&gCamLock);
801 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
802 &sessionId[mCameraId]);
803 pthread_mutex_unlock(&gCamLock);
804
805 if (rc < 0) {
806 LOGE("Error, failed to get sessiion id");
807 return UNKNOWN_ERROR;
808 } else {
809 //Allocate related cam sync buffer
810 //this is needed for the payload that goes along with bundling cmd for related
811 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700812 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
813 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700814 if(rc != OK) {
815 rc = NO_MEMORY;
816 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
817 return NO_MEMORY;
818 }
819
820 //Map memory for related cam sync buffer
821 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700822 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
823 m_pDualCamCmdHeap->getFd(0),
824 sizeof(cam_dual_camera_cmd_info_t),
825 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700826 if(rc < 0) {
827 LOGE("Dualcam: failed to map Related cam sync buffer");
828 rc = FAILED_TRANSACTION;
829 return NO_MEMORY;
830 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700831 m_pDualCamCmdPtr =
832 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700833 }
834
835 LOGH("mCameraId=%d",mCameraId);
836
837 return NO_ERROR;
838}
839
840/*===========================================================================
841 * FUNCTION : closeCamera
842 *
843 * DESCRIPTION: close camera
844 *
845 * PARAMETERS : none
846 *
847 * RETURN : int32_t type of status
848 * NO_ERROR -- success
 *              non-zero failure code
850 *==========================================================================*/
851int QCamera3HardwareInterface::closeCamera()
852{
853 KPI_ATRACE_CALL();
854 int rc = NO_ERROR;
855 char value[PROPERTY_VALUE_MAX];
856
857 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
858 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700859
860 // unmap memory for related cam sync buffer
861 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
862 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700863 if (NULL != m_pDualCamCmdHeap) {
864 m_pDualCamCmdHeap->deallocate();
865 delete m_pDualCamCmdHeap;
866 m_pDualCamCmdHeap = NULL;
867 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700868 }
869
Thierry Strudel3d639192016-09-09 11:52:26 -0700870 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
871 mCameraHandle = NULL;
872
873 //reset session id to some invalid id
874 pthread_mutex_lock(&gCamLock);
875 sessionId[mCameraId] = 0xDEADBEEF;
876 pthread_mutex_unlock(&gCamLock);
877
878 //Notify display HAL that there is no active camera session
879 //but avoid calling the same during bootup. Refer to openCamera
880 //for more details.
881 property_get("service.bootanim.exit", value, "0");
882 if (atoi(value) == 1) {
883 pthread_mutex_lock(&gCamLock);
884 if (--gNumCameraSessions == 0) {
885 setCameraLaunchStatus(false);
886 }
887 pthread_mutex_unlock(&gCamLock);
888 }
889
Thierry Strudel3d639192016-09-09 11:52:26 -0700890 if (mExifParams.debug_params) {
891 free(mExifParams.debug_params);
892 mExifParams.debug_params = NULL;
893 }
894 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
895 LOGW("Failed to release flash for camera id: %d",
896 mCameraId);
897 }
898 mState = CLOSED;
899 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
900 mCameraId, rc);
901 return rc;
902}
903
904/*===========================================================================
905 * FUNCTION : initialize
906 *
907 * DESCRIPTION: Initialize frameworks callback functions
908 *
909 * PARAMETERS :
910 * @callback_ops : callback function to frameworks
911 *
912 * RETURN :
913 *
914 *==========================================================================*/
915int QCamera3HardwareInterface::initialize(
916 const struct camera3_callback_ops *callback_ops)
917{
918 ATRACE_CALL();
919 int rc;
920
921 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
922 pthread_mutex_lock(&mMutex);
923
924 // Validate current state
925 switch (mState) {
926 case OPENED:
927 /* valid state */
928 break;
929 default:
930 LOGE("Invalid state %d", mState);
931 rc = -ENODEV;
932 goto err1;
933 }
934
935 rc = initParameters();
936 if (rc < 0) {
937 LOGE("initParamters failed %d", rc);
938 goto err1;
939 }
940 mCallbackOps = callback_ops;
941
942 mChannelHandle = mCameraHandle->ops->add_channel(
943 mCameraHandle->camera_handle, NULL, NULL, this);
944 if (mChannelHandle == 0) {
945 LOGE("add_channel failed");
946 rc = -ENOMEM;
947 pthread_mutex_unlock(&mMutex);
948 return rc;
949 }
950
951 pthread_mutex_unlock(&mMutex);
952 mCameraInitialized = true;
953 mState = INITIALIZED;
954 LOGI("X");
955 return 0;
956
957err1:
958 pthread_mutex_unlock(&mMutex);
959 return rc;
960}
961
962/*===========================================================================
963 * FUNCTION : validateStreamDimensions
964 *
965 * DESCRIPTION: Check if the configuration requested are those advertised
966 *
967 * PARAMETERS :
968 * @stream_list : streams to be configured
969 *
970 * RETURN :
971 *
972 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            // At most one input stream is supported per configuration.
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270-degree rotation the capability tables are matched
        // against the swapped (post-rotation) dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG (BLOB) streams must match a supported picture size.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/bidirectional/input streams at full active-array size are
            // accepted immediately; the break below exits the switch only.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // All other processed streams must match a supported picture size.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1084
1085/*==============================================================================
1086 * FUNCTION : isSupportChannelNeeded
1087 *
1088 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1089 *
1090 * PARAMETERS :
1091 * @stream_list : streams to be configured
1092 * @stream_config_info : the config info for streams to be configured
1093 *
 * RETURN     : Boolean true/false decision
1095 *
1096 *==========================================================================*/
1097bool QCamera3HardwareInterface::isSupportChannelNeeded(
1098 camera3_stream_configuration_t *streamList,
1099 cam_stream_size_info_t stream_config_info)
1100{
1101 uint32_t i;
1102 bool pprocRequested = false;
1103 /* Check for conditions where PProc pipeline does not have any streams*/
1104 for (i = 0; i < stream_config_info.num_streams; i++) {
1105 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1106 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1107 pprocRequested = true;
1108 break;
1109 }
1110 }
1111
1112 if (pprocRequested == false )
1113 return true;
1114
1115 /* Dummy stream needed if only raw or jpeg streams present */
1116 for (i = 0; i < streamList->num_streams; i++) {
1117 switch(streamList->streams[i]->format) {
1118 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1119 case HAL_PIXEL_FORMAT_RAW10:
1120 case HAL_PIXEL_FORMAT_RAW16:
1121 case HAL_PIXEL_FORMAT_BLOB:
1122 break;
1123 default:
1124 return false;
1125 }
1126 }
1127 return true;
1128}
1129
1130/*==============================================================================
1131 * FUNCTION : getSensorOutputSize
1132 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1134 *
1135 * PARAMETERS :
1136 * @sensor_dim : sensor output dimension (output)
1137 *
1138 * RETURN : int32_t type of status
1139 * NO_ERROR -- success
 *              non-zero failure code
1141 *
1142 *==========================================================================*/
1143int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1144{
1145 int32_t rc = NO_ERROR;
1146
1147 cam_dimension_t max_dim = {0, 0};
1148 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1149 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1150 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1151 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1152 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1153 }
1154
1155 clear_metadata_buffer(mParameters);
1156
1157 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1158 max_dim);
1159 if (rc != NO_ERROR) {
1160 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1161 return rc;
1162 }
1163
1164 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1165 if (rc != NO_ERROR) {
1166 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1167 return rc;
1168 }
1169
1170 clear_metadata_buffer(mParameters);
1171 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1172
1173 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1174 mParameters);
1175 if (rc != NO_ERROR) {
1176 LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1177 return rc;
1178 }
1179
1180 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1181 LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1182
1183 return rc;
1184}
1185
1186/*==============================================================================
1187 * FUNCTION : enablePowerHint
1188 *
1189 * DESCRIPTION: enable single powerhint for preview and different video modes.
1190 *
1191 * PARAMETERS :
1192 *
1193 * RETURN : NULL
1194 *
1195 *==========================================================================*/
1196void QCamera3HardwareInterface::enablePowerHint()
1197{
1198 if (!mPowerHintEnabled) {
1199 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1200 mPowerHintEnabled = true;
1201 }
1202}
1203
1204/*==============================================================================
1205 * FUNCTION : disablePowerHint
1206 *
1207 * DESCRIPTION: disable current powerhint.
1208 *
1209 * PARAMETERS :
1210 *
1211 * RETURN : NULL
1212 *
1213 *==========================================================================*/
1214void QCamera3HardwareInterface::disablePowerHint()
1215{
1216 if (mPowerHintEnabled) {
1217 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1218 mPowerHintEnabled = false;
1219 }
1220}
1221
1222/*==============================================================================
1223 * FUNCTION : addToPPFeatureMask
1224 *
1225 * DESCRIPTION: add additional features to pp feature mask based on
1226 * stream type and usecase
1227 *
1228 * PARAMETERS :
1229 * @stream_format : stream type for feature mask
1230 * @stream_idx : stream idx within postprocess_mask list to change
1231 *
1232 * RETURN : NULL
1233 *
1234 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // The property may be hex ("0x...") or decimal; parse accordingly.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Malformed property value: leave the stream's mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes priority over LLVD when both bits are set.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1285
1286/*==============================================================================
1287 * FUNCTION : updateFpsInPreviewBuffer
1288 *
1289 * DESCRIPTION: update FPS information in preview buffer.
1290 *
1291 * PARAMETERS :
1292 * @metadata : pointer to metadata buffer
1293 * @frame_number: frame_number to look for in pending buffer list
1294 *
1295 * RETURN : None
1296 *
1297 *==========================================================================*/
1298void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1299 uint32_t frame_number)
1300{
1301 // Mark all pending buffers for this particular request
1302 // with corresponding framerate information
1303 for (List<PendingBuffersInRequest>::iterator req =
1304 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1305 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1306 for(List<PendingBufferInfo>::iterator j =
1307 req->mPendingBufferList.begin();
1308 j != req->mPendingBufferList.end(); j++) {
1309 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1310 if ((req->frame_number == frame_number) &&
1311 (channel->getStreamTypeMask() &
1312 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1313 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1314 CAM_INTF_PARM_FPS_RANGE, metadata) {
1315 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1316 struct private_handle_t *priv_handle =
1317 (struct private_handle_t *)(*(j->buffer));
1318 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1319 }
1320 }
1321 }
1322 }
1323}
1324
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001325/*==============================================================================
1326 * FUNCTION : updateTimeStampInPendingBuffers
1327 *
1328 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1329 * of a frame number
1330 *
1331 * PARAMETERS :
1332 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1333 * @timestamp : timestamp to be set
1334 *
1335 * RETURN : None
1336 *
1337 *==========================================================================*/
1338void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1339 uint32_t frameNumber, nsecs_t timestamp)
1340{
1341 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1342 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1343 if (req->frame_number != frameNumber)
1344 continue;
1345
1346 for (auto k = req->mPendingBufferList.begin();
1347 k != req->mPendingBufferList.end(); k++ ) {
1348 struct private_handle_t *priv_handle =
1349 (struct private_handle_t *) (*(k->buffer));
1350 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1351 }
1352 }
1353 return;
1354}
1355
Thierry Strudel3d639192016-09-09 11:52:26 -07001356/*===========================================================================
1357 * FUNCTION : configureStreams
1358 *
1359 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1360 * and output streams.
1361 *
1362 * PARAMETERS :
1363 * @stream_list : streams to be configured
1364 *
1365 * RETURN :
1366 *
1367 *==========================================================================*/
1368int QCamera3HardwareInterface::configureStreams(
1369 camera3_stream_configuration_t *streamList)
1370{
1371 ATRACE_CALL();
1372 int rc = 0;
1373
1374 // Acquire perfLock before configure streams
1375 m_perfLock.lock_acq();
1376 rc = configureStreamsPerfLocked(streamList);
1377 m_perfLock.lock_rel();
1378
1379 return rc;
1380}
1381
1382/*===========================================================================
1383 * FUNCTION : configureStreamsPerfLocked
1384 *
1385 * DESCRIPTION: configureStreams while perfLock is held.
1386 *
1387 * PARAMETERS :
1388 * @stream_list : streams to be configured
1389 *
1390 * RETURN : int32_t type of status
1391 * NO_ERROR -- success
 *              non-zero failure code
1393 *==========================================================================*/
1394int QCamera3HardwareInterface::configureStreamsPerfLocked(
1395 camera3_stream_configuration_t *streamList)
1396{
1397 ATRACE_CALL();
1398 int rc = 0;
1399
1400 // Sanity check stream_list
1401 if (streamList == NULL) {
1402 LOGE("NULL stream configuration");
1403 return BAD_VALUE;
1404 }
1405 if (streamList->streams == NULL) {
1406 LOGE("NULL stream list");
1407 return BAD_VALUE;
1408 }
1409
1410 if (streamList->num_streams < 1) {
1411 LOGE("Bad number of streams requested: %d",
1412 streamList->num_streams);
1413 return BAD_VALUE;
1414 }
1415
1416 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1417 LOGE("Maximum number of streams %d exceeded: %d",
1418 MAX_NUM_STREAMS, streamList->num_streams);
1419 return BAD_VALUE;
1420 }
1421
1422 mOpMode = streamList->operation_mode;
1423 LOGD("mOpMode: %d", mOpMode);
1424
1425 /* first invalidate all the steams in the mStreamList
1426 * if they appear again, they will be validated */
1427 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1428 it != mStreamInfo.end(); it++) {
1429 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1430 if (channel) {
1431 channel->stop();
1432 }
1433 (*it)->status = INVALID;
1434 }
1435
1436 if (mRawDumpChannel) {
1437 mRawDumpChannel->stop();
1438 delete mRawDumpChannel;
1439 mRawDumpChannel = NULL;
1440 }
1441
1442 if (mSupportChannel)
1443 mSupportChannel->stop();
1444
1445 if (mAnalysisChannel) {
1446 mAnalysisChannel->stop();
1447 }
1448 if (mMetadataChannel) {
1449 /* If content of mStreamInfo is not 0, there is metadata stream */
1450 mMetadataChannel->stop();
1451 }
1452 if (mChannelHandle) {
1453 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1454 mChannelHandle);
1455 LOGD("stopping channel %d", mChannelHandle);
1456 }
1457
1458 pthread_mutex_lock(&mMutex);
1459
1460 // Check state
1461 switch (mState) {
1462 case INITIALIZED:
1463 case CONFIGURED:
1464 case STARTED:
1465 /* valid state */
1466 break;
1467 default:
1468 LOGE("Invalid state %d", mState);
1469 pthread_mutex_unlock(&mMutex);
1470 return -ENODEV;
1471 }
1472
1473 /* Check whether we have video stream */
1474 m_bIs4KVideo = false;
1475 m_bIsVideo = false;
1476 m_bEisSupportedSize = false;
1477 m_bTnrEnabled = false;
1478 bool isZsl = false;
1479 uint32_t videoWidth = 0U;
1480 uint32_t videoHeight = 0U;
1481 size_t rawStreamCnt = 0;
1482 size_t stallStreamCnt = 0;
1483 size_t processedStreamCnt = 0;
1484 // Number of streams on ISP encoder path
1485 size_t numStreamsOnEncoder = 0;
1486 size_t numYuv888OnEncoder = 0;
1487 bool bYuv888OverrideJpeg = false;
1488 cam_dimension_t largeYuv888Size = {0, 0};
1489 cam_dimension_t maxViewfinderSize = {0, 0};
1490 bool bJpegExceeds4K = false;
1491 bool bJpegOnEncoder = false;
1492 bool bUseCommonFeatureMask = false;
1493 cam_feature_mask_t commonFeatureMask = 0;
1494 bool bSmallJpegSize = false;
1495 uint32_t width_ratio;
1496 uint32_t height_ratio;
1497 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1498 camera3_stream_t *inputStream = NULL;
1499 bool isJpeg = false;
1500 cam_dimension_t jpegSize = {0, 0};
1501
1502 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1503
1504 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001505 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001506 uint8_t eis_prop_set;
1507 uint32_t maxEisWidth = 0;
1508 uint32_t maxEisHeight = 0;
1509
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001510 // Initialize all instant AEC related variables
1511 mInstantAEC = false;
1512 mResetInstantAEC = false;
1513 mInstantAECSettledFrameNumber = 0;
1514 mAecSkipDisplayFrameBound = 0;
1515 mInstantAecFrameIdxCount = 0;
1516
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1518
1519 size_t count = IS_TYPE_MAX;
1520 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1521 for (size_t i = 0; i < count; i++) {
1522 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001523 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1524 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001525 break;
1526 }
1527 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001528 count = CAM_OPT_STAB_MAX;
1529 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1530 for (size_t i = 0; i < count; i++) {
1531 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1532 oisSupported = true;
1533 break;
1534 }
1535 }
1536
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001537 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001538 maxEisWidth = MAX_EIS_WIDTH;
1539 maxEisHeight = MAX_EIS_HEIGHT;
1540 }
1541
1542 /* EIS setprop control */
1543 char eis_prop[PROPERTY_VALUE_MAX];
1544 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001545 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001546 eis_prop_set = (uint8_t)atoi(eis_prop);
1547
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001548 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001549 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1550
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001551 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1552 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1553
Thierry Strudel3d639192016-09-09 11:52:26 -07001554 /* stream configurations */
1555 for (size_t i = 0; i < streamList->num_streams; i++) {
1556 camera3_stream_t *newStream = streamList->streams[i];
1557 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1558 "height = %d, rotation = %d, usage = 0x%x",
1559 i, newStream->stream_type, newStream->format,
1560 newStream->width, newStream->height, newStream->rotation,
1561 newStream->usage);
1562 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1563 newStream->stream_type == CAMERA3_STREAM_INPUT){
1564 isZsl = true;
1565 }
1566 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1567 inputStream = newStream;
1568 }
1569
1570 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1571 isJpeg = true;
1572 jpegSize.width = newStream->width;
1573 jpegSize.height = newStream->height;
1574 if (newStream->width > VIDEO_4K_WIDTH ||
1575 newStream->height > VIDEO_4K_HEIGHT)
1576 bJpegExceeds4K = true;
1577 }
1578
1579 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1580 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1581 m_bIsVideo = true;
1582 videoWidth = newStream->width;
1583 videoHeight = newStream->height;
1584 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1585 (VIDEO_4K_HEIGHT <= newStream->height)) {
1586 m_bIs4KVideo = true;
1587 }
1588 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1589 (newStream->height <= maxEisHeight);
1590 }
1591 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1592 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1593 switch (newStream->format) {
1594 case HAL_PIXEL_FORMAT_BLOB:
1595 stallStreamCnt++;
1596 if (isOnEncoder(maxViewfinderSize, newStream->width,
1597 newStream->height)) {
1598 numStreamsOnEncoder++;
1599 bJpegOnEncoder = true;
1600 }
1601 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1602 newStream->width);
1603 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1604 newStream->height);;
1605 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1606 "FATAL: max_downscale_factor cannot be zero and so assert");
1607 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1608 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1609 LOGH("Setting small jpeg size flag to true");
1610 bSmallJpegSize = true;
1611 }
1612 break;
1613 case HAL_PIXEL_FORMAT_RAW10:
1614 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1615 case HAL_PIXEL_FORMAT_RAW16:
1616 rawStreamCnt++;
1617 break;
1618 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1619 processedStreamCnt++;
1620 if (isOnEncoder(maxViewfinderSize, newStream->width,
1621 newStream->height)) {
1622 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1623 !IS_USAGE_ZSL(newStream->usage)) {
1624 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1625 }
1626 numStreamsOnEncoder++;
1627 }
1628 break;
1629 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1630 processedStreamCnt++;
1631 if (isOnEncoder(maxViewfinderSize, newStream->width,
1632 newStream->height)) {
1633 // If Yuv888 size is not greater than 4K, set feature mask
1634 // to SUPERSET so that it support concurrent request on
1635 // YUV and JPEG.
1636 if (newStream->width <= VIDEO_4K_WIDTH &&
1637 newStream->height <= VIDEO_4K_HEIGHT) {
1638 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1639 }
1640 numStreamsOnEncoder++;
1641 numYuv888OnEncoder++;
1642 largeYuv888Size.width = newStream->width;
1643 largeYuv888Size.height = newStream->height;
1644 }
1645 break;
1646 default:
1647 processedStreamCnt++;
1648 if (isOnEncoder(maxViewfinderSize, newStream->width,
1649 newStream->height)) {
1650 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1651 numStreamsOnEncoder++;
1652 }
1653 break;
1654 }
1655
1656 }
1657 }
1658
1659 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1660 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1661 !m_bIsVideo) {
1662 m_bEisEnable = false;
1663 }
1664
1665 /* Logic to enable/disable TNR based on specific config size/etc.*/
1666 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1667 ((videoWidth == 1920 && videoHeight == 1080) ||
1668 (videoWidth == 1280 && videoHeight == 720)) &&
1669 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1670 m_bTnrEnabled = true;
1671
1672 /* Check if num_streams is sane */
1673 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1674 rawStreamCnt > MAX_RAW_STREAMS ||
1675 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1676 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1677 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1678 pthread_mutex_unlock(&mMutex);
1679 return -EINVAL;
1680 }
1681 /* Check whether we have zsl stream or 4k video case */
1682 if (isZsl && m_bIsVideo) {
1683 LOGE("Currently invalid configuration ZSL&Video!");
1684 pthread_mutex_unlock(&mMutex);
1685 return -EINVAL;
1686 }
1687 /* Check if stream sizes are sane */
1688 if (numStreamsOnEncoder > 2) {
1689 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1690 pthread_mutex_unlock(&mMutex);
1691 return -EINVAL;
1692 } else if (1 < numStreamsOnEncoder){
1693 bUseCommonFeatureMask = true;
1694 LOGH("Multiple streams above max viewfinder size, common mask needed");
1695 }
1696
1697 /* Check if BLOB size is greater than 4k in 4k recording case */
1698 if (m_bIs4KVideo && bJpegExceeds4K) {
1699 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1700 pthread_mutex_unlock(&mMutex);
1701 return -EINVAL;
1702 }
1703
1704 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1705 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1706 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1707 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1708 // configurations:
1709 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1710 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1711 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1712 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1713 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1714 __func__);
1715 pthread_mutex_unlock(&mMutex);
1716 return -EINVAL;
1717 }
1718
1719 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1720 // the YUV stream's size is greater or equal to the JPEG size, set common
1721 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1722 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1723 jpegSize.width, jpegSize.height) &&
1724 largeYuv888Size.width > jpegSize.width &&
1725 largeYuv888Size.height > jpegSize.height) {
1726 bYuv888OverrideJpeg = true;
1727 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1728 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1729 }
1730
1731 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1732 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1733 commonFeatureMask);
1734 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1735 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1736
1737 rc = validateStreamDimensions(streamList);
1738 if (rc == NO_ERROR) {
1739 rc = validateStreamRotations(streamList);
1740 }
1741 if (rc != NO_ERROR) {
1742 LOGE("Invalid stream configuration requested!");
1743 pthread_mutex_unlock(&mMutex);
1744 return rc;
1745 }
1746
1747 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1748 for (size_t i = 0; i < streamList->num_streams; i++) {
1749 camera3_stream_t *newStream = streamList->streams[i];
1750 LOGH("newStream type = %d, stream format = %d "
1751 "stream size : %d x %d, stream rotation = %d",
1752 newStream->stream_type, newStream->format,
1753 newStream->width, newStream->height, newStream->rotation);
1754 //if the stream is in the mStreamList validate it
1755 bool stream_exists = false;
1756 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1757 it != mStreamInfo.end(); it++) {
1758 if ((*it)->stream == newStream) {
1759 QCamera3ProcessingChannel *channel =
1760 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1761 stream_exists = true;
1762 if (channel)
1763 delete channel;
1764 (*it)->status = VALID;
1765 (*it)->stream->priv = NULL;
1766 (*it)->channel = NULL;
1767 }
1768 }
1769 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1770 //new stream
1771 stream_info_t* stream_info;
1772 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1773 if (!stream_info) {
1774 LOGE("Could not allocate stream info");
1775 rc = -ENOMEM;
1776 pthread_mutex_unlock(&mMutex);
1777 return rc;
1778 }
1779 stream_info->stream = newStream;
1780 stream_info->status = VALID;
1781 stream_info->channel = NULL;
1782 mStreamInfo.push_back(stream_info);
1783 }
1784 /* Covers Opaque ZSL and API1 F/W ZSL */
1785 if (IS_USAGE_ZSL(newStream->usage)
1786 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1787 if (zslStream != NULL) {
1788 LOGE("Multiple input/reprocess streams requested!");
1789 pthread_mutex_unlock(&mMutex);
1790 return BAD_VALUE;
1791 }
1792 zslStream = newStream;
1793 }
1794 /* Covers YUV reprocess */
1795 if (inputStream != NULL) {
1796 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1797 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1798 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1799 && inputStream->width == newStream->width
1800 && inputStream->height == newStream->height) {
1801 if (zslStream != NULL) {
1802 /* This scenario indicates multiple YUV streams with same size
1803 * as input stream have been requested, since zsl stream handle
1804 * is solely use for the purpose of overriding the size of streams
1805 * which share h/w streams we will just make a guess here as to
1806 * which of the stream is a ZSL stream, this will be refactored
1807 * once we make generic logic for streams sharing encoder output
1808 */
1809 LOGH("Warning, Multiple ip/reprocess streams requested!");
1810 }
1811 zslStream = newStream;
1812 }
1813 }
1814 }
1815
1816 /* If a zsl stream is set, we know that we have configured at least one input or
1817 bidirectional stream */
1818 if (NULL != zslStream) {
1819 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1820 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1821 mInputStreamInfo.format = zslStream->format;
1822 mInputStreamInfo.usage = zslStream->usage;
1823 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1824 mInputStreamInfo.dim.width,
1825 mInputStreamInfo.dim.height,
1826 mInputStreamInfo.format, mInputStreamInfo.usage);
1827 }
1828
1829 cleanAndSortStreamInfo();
1830 if (mMetadataChannel) {
1831 delete mMetadataChannel;
1832 mMetadataChannel = NULL;
1833 }
1834 if (mSupportChannel) {
1835 delete mSupportChannel;
1836 mSupportChannel = NULL;
1837 }
1838
1839 if (mAnalysisChannel) {
1840 delete mAnalysisChannel;
1841 mAnalysisChannel = NULL;
1842 }
1843
1844 if (mDummyBatchChannel) {
1845 delete mDummyBatchChannel;
1846 mDummyBatchChannel = NULL;
1847 }
1848
1849 //Create metadata channel and initialize it
1850 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1851 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1852 gCamCapability[mCameraId]->color_arrangement);
1853 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1854 mChannelHandle, mCameraHandle->ops, captureResultCb,
1855 &padding_info, metadataFeatureMask, this);
1856 if (mMetadataChannel == NULL) {
1857 LOGE("failed to allocate metadata channel");
1858 rc = -ENOMEM;
1859 pthread_mutex_unlock(&mMutex);
1860 return rc;
1861 }
1862 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1863 if (rc < 0) {
1864 LOGE("metadata channel initialization failed");
1865 delete mMetadataChannel;
1866 mMetadataChannel = NULL;
1867 pthread_mutex_unlock(&mMutex);
1868 return rc;
1869 }
1870
1871 // Create analysis stream all the time, even when h/w support is not available
1872 {
1873 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1874 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1875 gCamCapability[mCameraId]->color_arrangement);
1876 cam_analysis_info_t analysisInfo;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001877 int32_t ret = NO_ERROR;
1878 ret = mCommon.getAnalysisInfo(
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 FALSE,
1880 TRUE,
1881 analysisFeatureMask,
1882 &analysisInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001883 if (ret == NO_ERROR) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001884 mAnalysisChannel = new QCamera3SupportChannel(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001885 mCameraHandle->camera_handle,
1886 mChannelHandle,
1887 mCameraHandle->ops,
1888 &analysisInfo.analysis_padding_info,
1889 analysisFeatureMask,
1890 CAM_STREAM_TYPE_ANALYSIS,
1891 &analysisInfo.analysis_max_res,
1892 (analysisInfo.analysis_format
1893 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1894 : CAM_FORMAT_YUV_420_NV21),
1895 analysisInfo.hw_analysis_supported,
1896 gCamCapability[mCameraId]->color_arrangement,
1897 this,
1898 0); // force buffer count to 0
1899 } else {
1900 LOGW("getAnalysisInfo failed, ret = %d", ret);
1901 }
1902 if (!mAnalysisChannel) {
1903 LOGW("Analysis channel cannot be created");
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 }
1905 }
1906
1907 bool isRawStreamRequested = false;
1908 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1909 /* Allocate channel objects for the requested streams */
1910 for (size_t i = 0; i < streamList->num_streams; i++) {
1911 camera3_stream_t *newStream = streamList->streams[i];
1912 uint32_t stream_usage = newStream->usage;
1913 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1914 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1915 struct camera_info *p_info = NULL;
1916 pthread_mutex_lock(&gCamLock);
1917 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1918 pthread_mutex_unlock(&gCamLock);
1919 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1920 || IS_USAGE_ZSL(newStream->usage)) &&
1921 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1922 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1923 if (bUseCommonFeatureMask) {
1924 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1925 commonFeatureMask;
1926 } else {
1927 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1928 CAM_QCOM_FEATURE_NONE;
1929 }
1930
1931 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1932 LOGH("Input stream configured, reprocess config");
1933 } else {
1934 //for non zsl streams find out the format
1935 switch (newStream->format) {
1936 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1937 {
1938 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1939 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1940 /* add additional features to pp feature mask */
1941 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1942 mStreamConfigInfo.num_streams);
1943
1944 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1945 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1946 CAM_STREAM_TYPE_VIDEO;
1947 if (m_bTnrEnabled && m_bTnrVideo) {
1948 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1949 CAM_QCOM_FEATURE_CPP_TNR;
1950 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1951 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1952 ~CAM_QCOM_FEATURE_CDS;
1953 }
1954 } else {
1955 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1956 CAM_STREAM_TYPE_PREVIEW;
1957 if (m_bTnrEnabled && m_bTnrPreview) {
1958 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1959 CAM_QCOM_FEATURE_CPP_TNR;
1960 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1961 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1962 ~CAM_QCOM_FEATURE_CDS;
1963 }
1964 padding_info.width_padding = mSurfaceStridePadding;
1965 padding_info.height_padding = CAM_PAD_TO_2;
1966 }
1967 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1968 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1969 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1970 newStream->height;
1971 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1972 newStream->width;
1973 }
1974 }
1975 break;
1976 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1978 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1979 if (bUseCommonFeatureMask)
1980 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1981 commonFeatureMask;
1982 else
1983 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1984 CAM_QCOM_FEATURE_NONE;
1985 } else {
1986 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1987 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1988 }
1989 break;
1990 case HAL_PIXEL_FORMAT_BLOB:
1991 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1992 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1993 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1994 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1995 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1996 } else {
1997 if (bUseCommonFeatureMask &&
1998 isOnEncoder(maxViewfinderSize, newStream->width,
1999 newStream->height)) {
2000 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2001 } else {
2002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2003 }
2004 }
2005 if (isZsl) {
2006 if (zslStream) {
2007 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2008 (int32_t)zslStream->width;
2009 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2010 (int32_t)zslStream->height;
2011 } else {
2012 LOGE("Error, No ZSL stream identified");
2013 pthread_mutex_unlock(&mMutex);
2014 return -EINVAL;
2015 }
2016 } else if (m_bIs4KVideo) {
2017 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2018 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2019 } else if (bYuv888OverrideJpeg) {
2020 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2021 (int32_t)largeYuv888Size.width;
2022 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2023 (int32_t)largeYuv888Size.height;
2024 }
2025 break;
2026 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2027 case HAL_PIXEL_FORMAT_RAW16:
2028 case HAL_PIXEL_FORMAT_RAW10:
2029 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2030 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2031 isRawStreamRequested = true;
2032 break;
2033 default:
2034 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2035 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2036 break;
2037 }
2038 }
2039
2040 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2041 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2042 gCamCapability[mCameraId]->color_arrangement);
2043
2044 if (newStream->priv == NULL) {
2045 //New stream, construct channel
2046 switch (newStream->stream_type) {
2047 case CAMERA3_STREAM_INPUT:
2048 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2049 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2050 break;
2051 case CAMERA3_STREAM_BIDIRECTIONAL:
2052 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2053 GRALLOC_USAGE_HW_CAMERA_WRITE;
2054 break;
2055 case CAMERA3_STREAM_OUTPUT:
2056 /* For video encoding stream, set read/write rarely
2057 * flag so that they may be set to un-cached */
2058 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2059 newStream->usage |=
2060 (GRALLOC_USAGE_SW_READ_RARELY |
2061 GRALLOC_USAGE_SW_WRITE_RARELY |
2062 GRALLOC_USAGE_HW_CAMERA_WRITE);
2063 else if (IS_USAGE_ZSL(newStream->usage))
2064 {
2065 LOGD("ZSL usage flag skipping");
2066 }
2067 else if (newStream == zslStream
2068 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2069 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2070 } else
2071 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2072 break;
2073 default:
2074 LOGE("Invalid stream_type %d", newStream->stream_type);
2075 break;
2076 }
2077
2078 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2079 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2080 QCamera3ProcessingChannel *channel = NULL;
2081 switch (newStream->format) {
2082 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2083 if ((newStream->usage &
2084 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2085 (streamList->operation_mode ==
2086 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2087 ) {
2088 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2089 mChannelHandle, mCameraHandle->ops, captureResultCb,
2090 &gCamCapability[mCameraId]->padding_info,
2091 this,
2092 newStream,
2093 (cam_stream_type_t)
2094 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2095 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2096 mMetadataChannel,
2097 0); //heap buffers are not required for HFR video channel
2098 if (channel == NULL) {
2099 LOGE("allocation of channel failed");
2100 pthread_mutex_unlock(&mMutex);
2101 return -ENOMEM;
2102 }
2103 //channel->getNumBuffers() will return 0 here so use
2104 //MAX_INFLIGH_HFR_REQUESTS
2105 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2106 newStream->priv = channel;
2107 LOGI("num video buffers in HFR mode: %d",
2108 MAX_INFLIGHT_HFR_REQUESTS);
2109 } else {
2110 /* Copy stream contents in HFR preview only case to create
2111 * dummy batch channel so that sensor streaming is in
2112 * HFR mode */
2113 if (!m_bIsVideo && (streamList->operation_mode ==
2114 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2115 mDummyBatchStream = *newStream;
2116 }
2117 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2118 mChannelHandle, mCameraHandle->ops, captureResultCb,
2119 &gCamCapability[mCameraId]->padding_info,
2120 this,
2121 newStream,
2122 (cam_stream_type_t)
2123 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2124 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2125 mMetadataChannel,
2126 MAX_INFLIGHT_REQUESTS);
2127 if (channel == NULL) {
2128 LOGE("allocation of channel failed");
2129 pthread_mutex_unlock(&mMutex);
2130 return -ENOMEM;
2131 }
2132 newStream->max_buffers = channel->getNumBuffers();
2133 newStream->priv = channel;
2134 }
2135 break;
2136 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2137 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2138 mChannelHandle,
2139 mCameraHandle->ops, captureResultCb,
2140 &padding_info,
2141 this,
2142 newStream,
2143 (cam_stream_type_t)
2144 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2145 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2146 mMetadataChannel);
2147 if (channel == NULL) {
2148 LOGE("allocation of YUV channel failed");
2149 pthread_mutex_unlock(&mMutex);
2150 return -ENOMEM;
2151 }
2152 newStream->max_buffers = channel->getNumBuffers();
2153 newStream->priv = channel;
2154 break;
2155 }
2156 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2157 case HAL_PIXEL_FORMAT_RAW16:
2158 case HAL_PIXEL_FORMAT_RAW10:
2159 mRawChannel = new QCamera3RawChannel(
2160 mCameraHandle->camera_handle, mChannelHandle,
2161 mCameraHandle->ops, captureResultCb,
2162 &padding_info,
2163 this, newStream,
2164 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2165 mMetadataChannel,
2166 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2167 if (mRawChannel == NULL) {
2168 LOGE("allocation of raw channel failed");
2169 pthread_mutex_unlock(&mMutex);
2170 return -ENOMEM;
2171 }
2172 newStream->max_buffers = mRawChannel->getNumBuffers();
2173 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2174 break;
2175 case HAL_PIXEL_FORMAT_BLOB:
2176 // Max live snapshot inflight buffer is 1. This is to mitigate
2177 // frame drop issues for video snapshot. The more buffers being
2178 // allocated, the more frame drops there are.
2179 mPictureChannel = new QCamera3PicChannel(
2180 mCameraHandle->camera_handle, mChannelHandle,
2181 mCameraHandle->ops, captureResultCb,
2182 &padding_info, this, newStream,
2183 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2184 m_bIs4KVideo, isZsl, mMetadataChannel,
2185 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2186 if (mPictureChannel == NULL) {
2187 LOGE("allocation of channel failed");
2188 pthread_mutex_unlock(&mMutex);
2189 return -ENOMEM;
2190 }
2191 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2192 newStream->max_buffers = mPictureChannel->getNumBuffers();
2193 mPictureChannel->overrideYuvSize(
2194 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2195 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2196 break;
2197
2198 default:
2199 LOGE("not a supported format 0x%x", newStream->format);
2200 break;
2201 }
2202 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2203 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2204 } else {
2205 LOGE("Error, Unknown stream type");
2206 pthread_mutex_unlock(&mMutex);
2207 return -EINVAL;
2208 }
2209
2210 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2211 if (channel != NULL && channel->isUBWCEnabled()) {
2212 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002213 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2214 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002215 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2216 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2217 }
2218 }
2219
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2224 break;
2225 }
2226 }
2227 } else {
2228 // Channel already exists for this stream
2229 // Do nothing for now
2230 }
2231 padding_info = gCamCapability[mCameraId]->padding_info;
2232
2233 /* Do not add entries for input stream in metastream info
2234 * since there is no real stream associated with it
2235 */
2236 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2237 mStreamConfigInfo.num_streams++;
2238 }
2239
2240 //RAW DUMP channel
2241 if (mEnableRawDump && isRawStreamRequested == false){
2242 cam_dimension_t rawDumpSize;
2243 rawDumpSize = getMaxRawSize(mCameraId);
2244 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2245 setPAAFSupport(rawDumpFeatureMask,
2246 CAM_STREAM_TYPE_RAW,
2247 gCamCapability[mCameraId]->color_arrangement);
2248 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2249 mChannelHandle,
2250 mCameraHandle->ops,
2251 rawDumpSize,
2252 &padding_info,
2253 this, rawDumpFeatureMask);
2254 if (!mRawDumpChannel) {
2255 LOGE("Raw Dump channel cannot be created");
2256 pthread_mutex_unlock(&mMutex);
2257 return -ENOMEM;
2258 }
2259 }
2260
2261
2262 if (mAnalysisChannel) {
2263 cam_analysis_info_t analysisInfo;
2264 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2265 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2266 CAM_STREAM_TYPE_ANALYSIS;
2267 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2268 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2269 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2270 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2271 gCamCapability[mCameraId]->color_arrangement);
2272 rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2273 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2274 &analysisInfo);
2275 if (rc != NO_ERROR) {
2276 LOGE("getAnalysisInfo failed, ret = %d", rc);
2277 pthread_mutex_unlock(&mMutex);
2278 return rc;
2279 }
2280 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2281 analysisInfo.analysis_max_res;
2282 mStreamConfigInfo.num_streams++;
2283 }
2284
2285 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2286 cam_analysis_info_t supportInfo;
2287 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2288 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2289 setPAAFSupport(callbackFeatureMask,
2290 CAM_STREAM_TYPE_CALLBACK,
2291 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002292 int32_t ret = NO_ERROR;
2293 ret = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2294 if (ret != NO_ERROR) {
2295 /* Ignore the error for Mono camera
2296 * because the PAAF bit mask is only set
2297 * for CAM_STREAM_TYPE_ANALYSIS stream type
2298 */
2299 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2300 LOGW("getAnalysisInfo failed, ret = %d", ret);
2301 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002302 }
2303 mSupportChannel = new QCamera3SupportChannel(
2304 mCameraHandle->camera_handle,
2305 mChannelHandle,
2306 mCameraHandle->ops,
2307 &gCamCapability[mCameraId]->padding_info,
2308 callbackFeatureMask,
2309 CAM_STREAM_TYPE_CALLBACK,
2310 &QCamera3SupportChannel::kDim,
2311 CAM_FORMAT_YUV_420_NV21,
2312 supportInfo.hw_analysis_supported,
2313 gCamCapability[mCameraId]->color_arrangement,
2314 this);
2315 if (!mSupportChannel) {
2316 LOGE("dummy channel cannot be created");
2317 pthread_mutex_unlock(&mMutex);
2318 return -ENOMEM;
2319 }
2320 }
2321
2322 if (mSupportChannel) {
2323 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2324 QCamera3SupportChannel::kDim;
2325 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2326 CAM_STREAM_TYPE_CALLBACK;
2327 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2328 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2329 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2330 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2331 gCamCapability[mCameraId]->color_arrangement);
2332 mStreamConfigInfo.num_streams++;
2333 }
2334
2335 if (mRawDumpChannel) {
2336 cam_dimension_t rawSize;
2337 rawSize = getMaxRawSize(mCameraId);
2338 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2339 rawSize;
2340 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2341 CAM_STREAM_TYPE_RAW;
2342 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2343 CAM_QCOM_FEATURE_NONE;
2344 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2345 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2346 gCamCapability[mCameraId]->color_arrangement);
2347 mStreamConfigInfo.num_streams++;
2348 }
2349 /* In HFR mode, if video stream is not added, create a dummy channel so that
2350 * ISP can create a batch mode even for preview only case. This channel is
2351 * never 'start'ed (no stream-on), it is only 'initialized' */
2352 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2353 !m_bIsVideo) {
2354 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2355 setPAAFSupport(dummyFeatureMask,
2356 CAM_STREAM_TYPE_VIDEO,
2357 gCamCapability[mCameraId]->color_arrangement);
2358 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2359 mChannelHandle,
2360 mCameraHandle->ops, captureResultCb,
2361 &gCamCapability[mCameraId]->padding_info,
2362 this,
2363 &mDummyBatchStream,
2364 CAM_STREAM_TYPE_VIDEO,
2365 dummyFeatureMask,
2366 mMetadataChannel);
2367 if (NULL == mDummyBatchChannel) {
2368 LOGE("creation of mDummyBatchChannel failed."
2369 "Preview will use non-hfr sensor mode ");
2370 }
2371 }
2372 if (mDummyBatchChannel) {
2373 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2374 mDummyBatchStream.width;
2375 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2376 mDummyBatchStream.height;
2377 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2378 CAM_STREAM_TYPE_VIDEO;
2379 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2380 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2381 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2382 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2383 gCamCapability[mCameraId]->color_arrangement);
2384 mStreamConfigInfo.num_streams++;
2385 }
2386
2387 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2388 mStreamConfigInfo.buffer_info.max_buffers =
2389 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2390
2391 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2392 for (pendingRequestIterator i = mPendingRequestsList.begin();
2393 i != mPendingRequestsList.end();) {
2394 i = erasePendingRequest(i);
2395 }
2396 mPendingFrameDropList.clear();
2397 // Initialize/Reset the pending buffers list
2398 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2399 req.mPendingBufferList.clear();
2400 }
2401 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2402
2403 mPendingReprocessResultList.clear();
2404
2405 mCurJpegMeta.clear();
2406 //Get min frame duration for this streams configuration
2407 deriveMinFrameDuration();
2408
2409 // Update state
2410 mState = CONFIGURED;
2411
2412 pthread_mutex_unlock(&mMutex);
2413
2414 return rc;
2415}
2416
2417/*===========================================================================
2418 * FUNCTION : validateCaptureRequest
2419 *
2420 * DESCRIPTION: validate a capture request from camera service
2421 *
2422 * PARAMETERS :
2423 * @request : request from framework to process
2424 *
2425 * RETURN :
2426 *
2427 *==========================================================================*/
2428int QCamera3HardwareInterface::validateCaptureRequest(
2429 camera3_capture_request_t *request)
2430{
2431 ssize_t idx = 0;
2432 const camera3_stream_buffer_t *b;
2433 CameraMetadata meta;
2434
2435 /* Sanity check the request */
2436 if (request == NULL) {
2437 LOGE("NULL capture request");
2438 return BAD_VALUE;
2439 }
2440
2441 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2442 /*settings cannot be null for the first request*/
2443 return BAD_VALUE;
2444 }
2445
2446 uint32_t frameNumber = request->frame_number;
2447 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2448 LOGE("Request %d: No output buffers provided!",
2449 __FUNCTION__, frameNumber);
2450 return BAD_VALUE;
2451 }
2452 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2453 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2454 request->num_output_buffers, MAX_NUM_STREAMS);
2455 return BAD_VALUE;
2456 }
2457 if (request->input_buffer != NULL) {
2458 b = request->input_buffer;
2459 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2460 LOGE("Request %d: Buffer %ld: Status not OK!",
2461 frameNumber, (long)idx);
2462 return BAD_VALUE;
2463 }
2464 if (b->release_fence != -1) {
2465 LOGE("Request %d: Buffer %ld: Has a release fence!",
2466 frameNumber, (long)idx);
2467 return BAD_VALUE;
2468 }
2469 if (b->buffer == NULL) {
2470 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2471 frameNumber, (long)idx);
2472 return BAD_VALUE;
2473 }
2474 }
2475
2476 // Validate all buffers
2477 b = request->output_buffers;
2478 do {
2479 QCamera3ProcessingChannel *channel =
2480 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2481 if (channel == NULL) {
2482 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2483 frameNumber, (long)idx);
2484 return BAD_VALUE;
2485 }
2486 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2487 LOGE("Request %d: Buffer %ld: Status not OK!",
2488 frameNumber, (long)idx);
2489 return BAD_VALUE;
2490 }
2491 if (b->release_fence != -1) {
2492 LOGE("Request %d: Buffer %ld: Has a release fence!",
2493 frameNumber, (long)idx);
2494 return BAD_VALUE;
2495 }
2496 if (b->buffer == NULL) {
2497 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2498 frameNumber, (long)idx);
2499 return BAD_VALUE;
2500 }
2501 if (*(b->buffer) == NULL) {
2502 LOGE("Request %d: Buffer %ld: NULL private handle!",
2503 frameNumber, (long)idx);
2504 return BAD_VALUE;
2505 }
2506 idx++;
2507 b = request->output_buffers + idx;
2508 } while (idx < (ssize_t)request->num_output_buffers);
2509
2510 return NO_ERROR;
2511}
2512
2513/*===========================================================================
2514 * FUNCTION : deriveMinFrameDuration
2515 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2517 * on currently configured streams.
2518 *
2519 * PARAMETERS : NONE
2520 *
2521 * RETURN : NONE
2522 *
2523 *==========================================================================*/
2524void QCamera3HardwareInterface::deriveMinFrameDuration()
2525{
2526 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2527
2528 maxJpegDim = 0;
2529 maxProcessedDim = 0;
2530 maxRawDim = 0;
2531
2532 // Figure out maximum jpeg, processed, and raw dimensions
2533 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2534 it != mStreamInfo.end(); it++) {
2535
2536 // Input stream doesn't have valid stream_type
2537 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2538 continue;
2539
2540 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2541 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2542 if (dimension > maxJpegDim)
2543 maxJpegDim = dimension;
2544 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2545 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2546 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2547 if (dimension > maxRawDim)
2548 maxRawDim = dimension;
2549 } else {
2550 if (dimension > maxProcessedDim)
2551 maxProcessedDim = dimension;
2552 }
2553 }
2554
2555 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2556 MAX_SIZES_CNT);
2557
2558 //Assume all jpeg dimensions are in processed dimensions.
2559 if (maxJpegDim > maxProcessedDim)
2560 maxProcessedDim = maxJpegDim;
2561 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2562 if (maxProcessedDim > maxRawDim) {
2563 maxRawDim = INT32_MAX;
2564
2565 for (size_t i = 0; i < count; i++) {
2566 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2567 gCamCapability[mCameraId]->raw_dim[i].height;
2568 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2569 maxRawDim = dimension;
2570 }
2571 }
2572
2573 //Find minimum durations for processed, jpeg, and raw
2574 for (size_t i = 0; i < count; i++) {
2575 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2576 gCamCapability[mCameraId]->raw_dim[i].height) {
2577 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2578 break;
2579 }
2580 }
2581 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2582 for (size_t i = 0; i < count; i++) {
2583 if (maxProcessedDim ==
2584 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2585 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2586 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2587 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2588 break;
2589 }
2590 }
2591}
2592
2593/*===========================================================================
2594 * FUNCTION : getMinFrameDuration
2595 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2597 * and current request configuration.
2598 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN     : min frame duration for a particular request
2602 *
2603 *==========================================================================*/
2604int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2605{
2606 bool hasJpegStream = false;
2607 bool hasRawStream = false;
2608 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2609 const camera3_stream_t *stream = request->output_buffers[i].stream;
2610 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2611 hasJpegStream = true;
2612 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2613 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2614 stream->format == HAL_PIXEL_FORMAT_RAW16)
2615 hasRawStream = true;
2616 }
2617
2618 if (!hasJpegStream)
2619 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2620 else
2621 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2622}
2623
2624/*===========================================================================
2625 * FUNCTION : handleBuffersDuringFlushLock
2626 *
2627 * DESCRIPTION: Account for buffers returned from back-end during flush
2628 * This function is executed while mMutex is held by the caller.
2629 *
2630 * PARAMETERS :
2631 * @buffer: image buffer for the callback
2632 *
2633 * RETURN :
2634 *==========================================================================*/
2635void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2636{
2637 bool buffer_found = false;
2638 for (List<PendingBuffersInRequest>::iterator req =
2639 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2640 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2641 for (List<PendingBufferInfo>::iterator i =
2642 req->mPendingBufferList.begin();
2643 i != req->mPendingBufferList.end(); i++) {
2644 if (i->buffer == buffer->buffer) {
2645 mPendingBuffersMap.numPendingBufsAtFlush--;
2646 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2647 buffer->buffer, req->frame_number,
2648 mPendingBuffersMap.numPendingBufsAtFlush);
2649 buffer_found = true;
2650 break;
2651 }
2652 }
2653 if (buffer_found) {
2654 break;
2655 }
2656 }
2657 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2658 //signal the flush()
2659 LOGD("All buffers returned to HAL. Continue flush");
2660 pthread_cond_signal(&mBuffersCond);
2661 }
2662}
2663
2664
2665/*===========================================================================
2666 * FUNCTION : handlePendingReprocResults
2667 *
2668 * DESCRIPTION: check and notify on any pending reprocess results
2669 *
2670 * PARAMETERS :
2671 * @frame_number : Pending request frame number
2672 *
2673 * RETURN : int32_t type of status
2674 * NO_ERROR -- success
2675 * none-zero failure code
2676 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Scan the deferred reprocess results for an entry matching this frame.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until now.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    // Exactly one output buffer was saved with the deferred
                    // reprocess result.
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    // Request is fully answered; remove it from the pending
                    // request list.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Consume the reprocess result. Iterator j is invalidated by
            // erase(), so stop scanning immediately.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    // Always reports success, even when no matching entry was found.
    return NO_ERROR;
}
2715
2716/*===========================================================================
2717 * FUNCTION : handleBatchMetadata
2718 *
2719 * DESCRIPTION: Handles metadata buffer callback in batch mode
2720 *
2721 * PARAMETERS : @metadata_buf: metadata buffer
2722 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2723 * the meta buf in this method
2724 *
2725 * RETURN :
2726 *
2727 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pull the (last-frame) bookkeeping entries out of the batch metadata.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Still loop below once with invalid_metadata set, so pipeline
        // depth bookkeeping in handleMetadataWithLock runs.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first one.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Map the batch's last frame number back to the first one, then
        // retire the batch entry.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One iteration per frame in the batch; warn if the batch is larger
        // than the supported HFR batch size.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer per-frame timestamp by stepping back from the last
                // frame's capture time at the HFR video frame rate.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                         last_frame_capture_time, capture_time);
            }
        }
        // The same metadata buffer is replayed once per inferred frame; it
        // is freed once after the loop, not inside handleMetadataWithLock.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2892
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002893void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2894 camera3_error_msg_code_t errorCode)
2895{
2896 camera3_notify_msg_t notify_msg;
2897 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2898 notify_msg.type = CAMERA3_MSG_ERROR;
2899 notify_msg.message.error.error_code = errorCode;
2900 notify_msg.message.error.error_stream = NULL;
2901 notify_msg.message.error.frame_number = frameNumber;
2902 mCallbackOps->notify(mCallbackOps, &notify_msg);
2903
2904 return;
2905}
Thierry Strudel3d639192016-09-09 11:52:26 -07002906/*===========================================================================
2907 * FUNCTION : handleMetadataWithLock
2908 *
2909 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2910 *
2911 * PARAMETERS : @metadata_buf: metadata buffer
2912 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2913 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002914 * @firstMetadataInBatch: Boolean to indicate whether this is the
2915 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002916 *
2917 * RETURN :
2918 *
2919 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
        bool firstMetadataInBatch)
{
    ATRACE_CALL();
    // During flush or in error/deinit state, drop the metadata (optionally
    // returning the buffer) instead of forwarding results.
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    // Extract the bookkeeping entries from the metadata buffer.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop stays in scope for the drop handling below; it is
    // non-NULL only when the back-end reported dropped stream buffers.
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
                *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        // Still fall through to pipeline-depth accounting.
        goto done_metadata;
    }
    frame_number_valid = *p_frame_number_valid;
    frame_number = *p_frame_number;
    capture_time = *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number = *p_urgent_frame_number;
    currentSysTime = systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if ( (currentSysTime - req.timestamp) >
            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
            for (auto &missed : req.mPendingBufferList) {
                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
                    "stream type = %d, stream format = %d",
                    frame_number, req.frame_number, missed.buffer,
                    missed.stream->stream_type, missed.stream->format);
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
             urgent_frame_number, capture_time);

        // Received an urgent frame number: deliver the 3A metadata early
        // as a partial result.
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // An older non-reprocess request with no partial result yet
            // means its urgent metadata was never delivered.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                     i->frame_number);
            }

            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
                    // Instant AEC settled for this frame.
                    LOGH("instant AEC settled for frame number %d", urgent_frame_number);
                    mInstantAECSettledFrameNumber = urgent_frame_number;
                }
                // result.result was allocated by the translate call above.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
        if (p_cam_frame_drop ||
                (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                bool dropFrame = false;
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                if (p_cam_frame_drop) {
                    // Back-end reported drops: match this buffer's stream ID
                    // against the dropped-stream list.
                    for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                        if (streamID == p_cam_frame_drop->streamID[k]) {
                            // Got the stream ID for drop frame.
                            dropFrame = true;
                            break;
                        }
                    }
                } else {
                    // This is instant AEC case.
                    // For instant AEC drop the stream until AEC is settled.
                    dropFrame = true;
                }
                if (dropFrame) {
                    // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                    if (p_cam_frame_drop) {
                        // Treat msg as error for system buffer drops
                        LOGE("Start of reporting error frame#=%u, streamID=%u",
                                i->frame_number, streamID);
                    } else {
                        // For instant AEC, inform frame drop and frame number
                        LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
                                "AEC settled frame number = %u",
                                i->frame_number, streamID, mInstantAECSettledFrameNumber);
                    }
                    notify_msg.type = CAMERA3_MSG_ERROR;
                    notify_msg.message.error.frame_number = i->frame_number;
                    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                    notify_msg.message.error.error_stream = j->stream;
                    mCallbackOps->notify(mCallbackOps, &notify_msg);
                    if (p_cam_frame_drop) {
                        // Treat msg as error for system buffer drops
                        LOGE("End of reporting error frame#=%u, streamID=%u",
                                i->frame_number, streamID);
                    } else {
                        // For instant AEC, inform frame drop and frame number
                        LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
                                "AEC settled frame number = %u",
                                i->frame_number, streamID, mInstantAECSettledFrameNumber);
                    }
                    PendingFrameDropInfo PendingFrameDrop;
                    PendingFrameDrop.frame_number=i->frame_number;
                    PendingFrameDrop.stream_ID = streamID;
                    // Add the Frame drop info to mPendingFrameDropList
                    // so the buffer is marked STATUS_ERROR when it returns.
                    mPendingFrameDropList.push_back(PendingFrameDrop);
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else {

                mPendingLiveRequest--;

                // Missed metadata: report ERROR_RESULT with a dummy result
                // carrying only the request id.
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            /* Set the timestamp in display metadata so that clients aware of
               private_handle such as VT can use this un-modified timestamps.
               Camera framework is unaware of this timestamp and cannot change this */
            updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc, i->fwkCacMode,
                    firstMetadataInBatch);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the output buffers already filled for this request.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (result_buffers != NULL) {
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // If this buffer was flagged for drop earlier, mark
                        // it STATUS_ERROR and retire the drop entry.
                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                                m != mPendingFrameDropList.end(); m++) {
                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                        frame_number, streamID);
                                m = mPendingFrameDropList.erase(m);
                                break;
                            }
                        }
                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                    }
                }

                result.output_buffers = result_buffers;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("meta frame_number = %u, capture_time = %lld",
                        result.frame_number, i->timestamp);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            }else {
                LOGE("Fatal error: out of memory");
            }
        } else {
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                    result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        // erasePendingRequest returns the next iterator; do not increment.
        i = erasePendingRequest(i);

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every still-pending request has advanced one stage in the pipeline.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
3279
3280/*===========================================================================
3281 * FUNCTION : hdrPlusPerfLock
3282 *
3283 * DESCRIPTION: perf lock for HDR+ using custom intent
3284 *
3285 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3286 *
3287 * RETURN : None
3288 *
3289 *==========================================================================*/
3290void QCamera3HardwareInterface::hdrPlusPerfLock(
3291 mm_camera_super_buf_t *metadata_buf)
3292{
3293 if (NULL == metadata_buf) {
3294 LOGE("metadata_buf is NULL");
3295 return;
3296 }
3297 metadata_buffer_t *metadata =
3298 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3299 int32_t *p_frame_number_valid =
3300 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3301 uint32_t *p_frame_number =
3302 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3303
3304 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3305 LOGE("%s: Invalid metadata", __func__);
3306 return;
3307 }
3308
3309 //acquire perf lock for 5 sec after the last HDR frame is captured
3310 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3311 if ((p_frame_number != NULL) &&
3312 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3313 m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3314 }
3315 }
3316
3317 //release lock after perf lock timer is expired. If lock is already released,
3318 //isTimerReset returns false
3319 if (m_perfLock.isTimerReset()) {
3320 mLastCustIntentFrmNum = -1;
3321 m_perfLock.lock_rel_timed();
3322 }
3323}
3324
3325/*===========================================================================
3326 * FUNCTION : handleInputBufferWithLock
3327 *
3328 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3329 *
3330 * PARAMETERS : @frame_number: frame number of the input buffer
3331 *
3332 * RETURN :
3333 *
3334 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request matching this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // Prefer the sensor timestamp carried in the reprocess settings;
            // fall back to the current monotonic time.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            // Shutter must be notified before the capture result is sent.
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for the input buffer's release fence (if any) before handing
        // the buffer back; the fence fd is closed either way.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Return the input buffer and settings as a complete capture result.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Request fully answered; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3392
3393/*===========================================================================
3394 * FUNCTION : handleBufferWithLock
3395 *
3396 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3397 *
3398 * PARAMETERS : @buffer: image buffer for the callback
3399 * @frame_number: frame number of the image buffer
3400 *
3401 * RETURN :
3402 *
3403 *==========================================================================*/
3404void QCamera3HardwareInterface::handleBufferWithLock(
3405 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3406{
3407 ATRACE_CALL();
3408 /* Nothing to be done during error state */
3409 if ((ERROR == mState) || (DEINIT == mState)) {
3410 return;
3411 }
3412 if (mFlushPerf) {
3413 handleBuffersDuringFlushLock(buffer);
3414 return;
3415 }
3416 //not in flush
3417 // If the frame number doesn't exist in the pending request list,
3418 // directly send the buffer to the frameworks, and update pending buffers map
3419 // Otherwise, book-keep the buffer.
3420 pendingRequestIterator i = mPendingRequestsList.begin();
3421 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3422 i++;
3423 }
3424 if (i == mPendingRequestsList.end()) {
3425 // Verify all pending requests frame_numbers are greater
3426 for (pendingRequestIterator j = mPendingRequestsList.begin();
3427 j != mPendingRequestsList.end(); j++) {
3428 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3429 LOGW("Error: pending live frame number %d is smaller than %d",
3430 j->frame_number, frame_number);
3431 }
3432 }
3433 camera3_capture_result_t result;
3434 memset(&result, 0, sizeof(camera3_capture_result_t));
3435 result.result = NULL;
3436 result.frame_number = frame_number;
3437 result.num_output_buffers = 1;
3438 result.partial_result = 0;
3439 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3440 m != mPendingFrameDropList.end(); m++) {
3441 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3442 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3443 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3444 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3445 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3446 frame_number, streamID);
3447 m = mPendingFrameDropList.erase(m);
3448 break;
3449 }
3450 }
3451 result.output_buffers = buffer;
3452 LOGH("result frame_number = %d, buffer = %p",
3453 frame_number, buffer->buffer);
3454
3455 mPendingBuffersMap.removeBuf(buffer->buffer);
3456
3457 mCallbackOps->process_capture_result(mCallbackOps, &result);
3458 } else {
3459 if (i->input_buffer) {
3460 CameraMetadata settings;
3461 camera3_notify_msg_t notify_msg;
3462 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3463 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3464 if(i->settings) {
3465 settings = i->settings;
3466 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3467 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3468 } else {
3469 LOGW("No timestamp in input settings! Using current one.");
3470 }
3471 } else {
3472 LOGE("Input settings missing!");
3473 }
3474
3475 notify_msg.type = CAMERA3_MSG_SHUTTER;
3476 notify_msg.message.shutter.frame_number = frame_number;
3477 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3478
3479 if (i->input_buffer->release_fence != -1) {
3480 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3481 close(i->input_buffer->release_fence);
3482 if (rc != OK) {
3483 LOGE("input buffer sync wait failed %d", rc);
3484 }
3485 }
3486 mPendingBuffersMap.removeBuf(buffer->buffer);
3487
Thierry Strudel04e026f2016-10-10 11:27:36 -07003488 camera3_capture_result result;
3489 memset(&result, 0, sizeof(camera3_capture_result));
3490 result.frame_number = frame_number;
3491 result.result = i->settings;
3492 result.input_buffer = i->input_buffer;
3493 result.num_output_buffers = 1;
3494 result.output_buffers = buffer;
3495 result.partial_result = PARTIAL_RESULT_COUNT;
Thierry Strudel3d639192016-09-09 11:52:26 -07003496
Thierry Strudel04e026f2016-10-10 11:27:36 -07003497 mCallbackOps->notify(mCallbackOps, &notify_msg);
3498 mCallbackOps->process_capture_result(mCallbackOps, &result);
3499 LOGD("Notify reprocess now %d!", frame_number);
3500 i = erasePendingRequest(i);
Thierry Strudel3d639192016-09-09 11:52:26 -07003501 } else {
3502 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3503 j != i->buffers.end(); j++) {
3504 if (j->stream == buffer->stream) {
3505 if (j->buffer != NULL) {
3506 LOGE("Error: buffer is already set");
3507 } else {
3508 j->buffer = (camera3_stream_buffer_t *)malloc(
3509 sizeof(camera3_stream_buffer_t));
3510 *(j->buffer) = *buffer;
3511 LOGH("cache buffer %p at result frame_number %u",
3512 buffer->buffer, frame_number);
3513 }
3514 }
3515 }
3516 }
3517 }
3518}
3519
3520/*===========================================================================
3521 * FUNCTION : unblockRequestIfNecessary
3522 *
3523 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3524 * that mMutex is held when this function is called.
3525 *
3526 * PARAMETERS :
3527 *
3528 * RETURN :
3529 *
3530 *==========================================================================*/
3531void QCamera3HardwareInterface::unblockRequestIfNecessary()
3532{
3533 // Unblock process_capture_request
3534 pthread_cond_signal(&mRequestCond);
3535}
3536
3537
3538/*===========================================================================
3539 * FUNCTION : processCaptureRequest
3540 *
3541 * DESCRIPTION: process a capture request from camera service
3542 *
3543 * PARAMETERS :
3544 * @request : request from framework to process
3545 *
3546 * RETURN :
3547 *
3548 *==========================================================================*/
3549int QCamera3HardwareInterface::processCaptureRequest(
3550 camera3_capture_request_t *request)
3551{
3552 ATRACE_CALL();
3553 int rc = NO_ERROR;
3554 int32_t request_id;
3555 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07003556 bool isVidBufRequested = false;
3557 camera3_stream_buffer_t *pInputBuffer = NULL;
3558
3559 pthread_mutex_lock(&mMutex);
3560
3561 // Validate current state
3562 switch (mState) {
3563 case CONFIGURED:
3564 case STARTED:
3565 /* valid state */
3566 break;
3567
3568 case ERROR:
3569 pthread_mutex_unlock(&mMutex);
3570 handleCameraDeviceError();
3571 return -ENODEV;
3572
3573 default:
3574 LOGE("Invalid state %d", mState);
3575 pthread_mutex_unlock(&mMutex);
3576 return -ENODEV;
3577 }
3578
3579 rc = validateCaptureRequest(request);
3580 if (rc != NO_ERROR) {
3581 LOGE("incoming request is not valid");
3582 pthread_mutex_unlock(&mMutex);
3583 return rc;
3584 }
3585
3586 meta = request->settings;
3587
3588 // For first capture request, send capture intent, and
3589 // stream on all streams
3590 if (mState == CONFIGURED) {
3591 // send an unconfigure to the backend so that the isp
3592 // resources are deallocated
3593 if (!mFirstConfiguration) {
3594 cam_stream_size_info_t stream_config_info;
3595 int32_t hal_version = CAM_HAL_V3;
3596 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
3597 stream_config_info.buffer_info.min_buffers =
3598 MIN_INFLIGHT_REQUESTS;
3599 stream_config_info.buffer_info.max_buffers =
3600 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
3601 clear_metadata_buffer(mParameters);
3602 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3603 CAM_INTF_PARM_HAL_VERSION, hal_version);
3604 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3605 CAM_INTF_META_STREAM_INFO, stream_config_info);
3606 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3607 mParameters);
3608 if (rc < 0) {
3609 LOGE("set_parms for unconfigure failed");
3610 pthread_mutex_unlock(&mMutex);
3611 return rc;
3612 }
3613 }
3614 m_perfLock.lock_acq();
3615 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003616 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07003617 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003618 property_get("persist.camera.is_type", is_type_value, "4");
3619 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
3620 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
3621 property_get("persist.camera.is_type_preview", is_type_value, "4");
3622 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
3623 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07003624
3625 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
3626 int32_t hal_version = CAM_HAL_V3;
3627 uint8_t captureIntent =
3628 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
3629 mCaptureIntent = captureIntent;
3630 clear_metadata_buffer(mParameters);
3631 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
3632 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
3633 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003634 if (mFirstConfiguration) {
3635 // configure instant AEC
3636 // Instant AEC is a session based parameter and it is needed only
3637 // once per complete session after open camera.
3638 // i.e. This is set only once for the first capture request, after open camera.
3639 setInstantAEC(meta);
3640 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003641 uint8_t fwkVideoStabMode=0;
3642 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
3643 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
3644 }
3645
3646 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
3647 // turn it on for video/preview
3648 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
3649 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07003650 int32_t vsMode;
3651 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
3652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
3653 rc = BAD_VALUE;
3654 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003655 LOGD("setEis %d", setEis);
3656 bool eis3Supported = false;
3657 size_t count = IS_TYPE_MAX;
3658 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
3659 for (size_t i = 0; i < count; i++) {
3660 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
3661 eis3Supported = true;
3662 break;
3663 }
3664 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003665
3666 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003667 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07003668 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3669 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003670 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
3671 is_type = isTypePreview;
3672 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
3673 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
3674 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07003675 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003676 } else {
3677 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003679 } else {
3680 is_type = IS_TYPE_NONE;
3681 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003682 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003683 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07003684 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
3685 }
3686 }
3687
3688 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3689 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
3690
3691 int32_t tintless_value = 1;
3692 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3693 CAM_INTF_PARM_TINTLESS, tintless_value);
3694 //Disable CDS for HFR mode or if DIS/EIS is on.
3695 //CDS is a session parameter in the backend/ISP, so need to be set/reset
3696 //after every configure_stream
3697 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
3698 (m_bIsVideo)) {
3699 int32_t cds = CAM_CDS_MODE_OFF;
3700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
3701 CAM_INTF_PARM_CDS_MODE, cds))
3702 LOGE("Failed to disable CDS for HFR mode");
3703
3704 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003705
3706 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
3707 uint8_t* use_av_timer = NULL;
3708
3709 if (m_debug_avtimer){
3710 use_av_timer = &m_debug_avtimer;
3711 }
3712 else{
3713 use_av_timer =
3714 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
3715 }
3716
3717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
3718 rc = BAD_VALUE;
3719 }
3720 }
3721
Thierry Strudel3d639192016-09-09 11:52:26 -07003722 setMobicat();
3723
3724 /* Set fps and hfr mode while sending meta stream info so that sensor
3725 * can configure appropriate streaming mode */
3726 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003727 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
3728 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003729 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
3730 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003731 if (rc == NO_ERROR) {
3732 int32_t max_fps =
3733 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
3734 if (max_fps == 60) {
3735 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
3736 }
3737 /* For HFR, more buffers are dequeued upfront to improve the performance */
3738 if (mBatchSize) {
3739 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
3740 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
3741 }
3742 }
3743 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07003744 LOGE("setHalFpsRange failed");
3745 }
3746 }
3747 if (meta.exists(ANDROID_CONTROL_MODE)) {
3748 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
3749 rc = extractSceneMode(meta, metaMode, mParameters);
3750 if (rc != NO_ERROR) {
3751 LOGE("extractSceneMode failed");
3752 }
3753 }
3754
Thierry Strudel04e026f2016-10-10 11:27:36 -07003755 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
3756 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
3757 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
3758 rc = setVideoHdrMode(mParameters, vhdr);
3759 if (rc != NO_ERROR) {
3760 LOGE("setVideoHDR is failed");
3761 }
3762 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003763
Thierry Strudel3d639192016-09-09 11:52:26 -07003764 //TODO: validate the arguments, HSV scenemode should have only the
3765 //advertised fps ranges
3766
3767 /*set the capture intent, hal version, tintless, stream info,
3768 *and disenable parameters to the backend*/
3769 LOGD("set_parms META_STREAM_INFO " );
3770 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3771 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003772 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07003773 mStreamConfigInfo.type[i],
3774 mStreamConfigInfo.stream_sizes[i].width,
3775 mStreamConfigInfo.stream_sizes[i].height,
3776 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003777 mStreamConfigInfo.format[i],
3778 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07003779 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003780
Thierry Strudel3d639192016-09-09 11:52:26 -07003781 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
3782 mParameters);
3783 if (rc < 0) {
3784 LOGE("set_parms failed for hal version, stream info");
3785 }
3786
3787 cam_dimension_t sensor_dim;
3788 memset(&sensor_dim, 0, sizeof(sensor_dim));
3789 rc = getSensorOutputSize(sensor_dim);
3790 if (rc != NO_ERROR) {
3791 LOGE("Failed to get sensor output size");
3792 pthread_mutex_unlock(&mMutex);
3793 goto error_exit;
3794 }
3795
3796 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
3797 gCamCapability[mCameraId]->active_array_size.height,
3798 sensor_dim.width, sensor_dim.height);
3799
3800 /* Set batchmode before initializing channel. Since registerBuffer
3801 * internally initializes some of the channels, better set batchmode
3802 * even before first register buffer */
3803 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3804 it != mStreamInfo.end(); it++) {
3805 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3806 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
3807 && mBatchSize) {
3808 rc = channel->setBatchSize(mBatchSize);
3809 //Disable per frame map unmap for HFR/batchmode case
3810 rc |= channel->setPerFrameMapUnmap(false);
3811 if (NO_ERROR != rc) {
3812 LOGE("Channel init failed %d", rc);
3813 pthread_mutex_unlock(&mMutex);
3814 goto error_exit;
3815 }
3816 }
3817 }
3818
3819 //First initialize all streams
3820 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3821 it != mStreamInfo.end(); it++) {
3822 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3823 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
3824 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003825 setEis) {
3826 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
3827 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
3828 is_type = mStreamConfigInfo.is_type[i];
3829 break;
3830 }
3831 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003832 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003833 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07003834 rc = channel->initialize(IS_TYPE_NONE);
3835 }
3836 if (NO_ERROR != rc) {
3837 LOGE("Channel initialization failed %d", rc);
3838 pthread_mutex_unlock(&mMutex);
3839 goto error_exit;
3840 }
3841 }
3842
3843 if (mRawDumpChannel) {
3844 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
3845 if (rc != NO_ERROR) {
3846 LOGE("Error: Raw Dump Channel init failed");
3847 pthread_mutex_unlock(&mMutex);
3848 goto error_exit;
3849 }
3850 }
3851 if (mSupportChannel) {
3852 rc = mSupportChannel->initialize(IS_TYPE_NONE);
3853 if (rc < 0) {
3854 LOGE("Support channel initialization failed");
3855 pthread_mutex_unlock(&mMutex);
3856 goto error_exit;
3857 }
3858 }
3859 if (mAnalysisChannel) {
3860 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
3861 if (rc < 0) {
3862 LOGE("Analysis channel initialization failed");
3863 pthread_mutex_unlock(&mMutex);
3864 goto error_exit;
3865 }
3866 }
3867 if (mDummyBatchChannel) {
3868 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
3869 if (rc < 0) {
3870 LOGE("mDummyBatchChannel setBatchSize failed");
3871 pthread_mutex_unlock(&mMutex);
3872 goto error_exit;
3873 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003874 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07003875 if (rc < 0) {
3876 LOGE("mDummyBatchChannel initialization failed");
3877 pthread_mutex_unlock(&mMutex);
3878 goto error_exit;
3879 }
3880 }
3881
3882 // Set bundle info
3883 rc = setBundleInfo();
3884 if (rc < 0) {
3885 LOGE("setBundleInfo failed %d", rc);
3886 pthread_mutex_unlock(&mMutex);
3887 goto error_exit;
3888 }
3889
3890 //update settings from app here
3891 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
3892 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
3893 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
3894 }
3895 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
3896 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
3897 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
3898 }
3899 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
3900 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
3901 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
3902
3903 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
3904 (mLinkedCameraId != mCameraId) ) {
3905 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
3906 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003907 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07003908 goto error_exit;
3909 }
3910 }
3911
3912 // add bundle related cameras
3913 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
3914 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003915 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
3916 &m_pDualCamCmdPtr->bundle_info;
3917 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07003918 if (mIsDeviceLinked)
3919 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
3920 else
3921 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
3922
3923 pthread_mutex_lock(&gCamLock);
3924
3925 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
3926 LOGE("Dualcam: Invalid Session Id ");
3927 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003928 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07003929 goto error_exit;
3930 }
3931
3932 if (mIsMainCamera == 1) {
3933 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
3934 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07003935 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003936 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07003937 // related session id should be session id of linked session
3938 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3939 } else {
3940 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
3941 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07003942 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003943 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07003944 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
3945 }
3946 pthread_mutex_unlock(&gCamLock);
3947
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003948 rc = mCameraHandle->ops->set_dual_cam_cmd(
3949 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07003950 if (rc < 0) {
3951 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003952 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07003953 goto error_exit;
3954 }
3955 }
3956
3957 //Then start them.
3958 LOGH("Start META Channel");
3959 rc = mMetadataChannel->start();
3960 if (rc < 0) {
3961 LOGE("META channel start failed");
3962 pthread_mutex_unlock(&mMutex);
3963 goto error_exit;
3964 }
3965
3966 if (mAnalysisChannel) {
3967 rc = mAnalysisChannel->start();
3968 if (rc < 0) {
3969 LOGE("Analysis channel start failed");
3970 mMetadataChannel->stop();
3971 pthread_mutex_unlock(&mMutex);
3972 goto error_exit;
3973 }
3974 }
3975
3976 if (mSupportChannel) {
3977 rc = mSupportChannel->start();
3978 if (rc < 0) {
3979 LOGE("Support channel start failed");
3980 mMetadataChannel->stop();
3981 /* Although support and analysis are mutually exclusive today
3982 adding it in anycase for future proofing */
3983 if (mAnalysisChannel) {
3984 mAnalysisChannel->stop();
3985 }
3986 pthread_mutex_unlock(&mMutex);
3987 goto error_exit;
3988 }
3989 }
3990 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
3991 it != mStreamInfo.end(); it++) {
3992 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
3993 LOGH("Start Processing Channel mask=%d",
3994 channel->getStreamTypeMask());
3995 rc = channel->start();
3996 if (rc < 0) {
3997 LOGE("channel start failed");
3998 pthread_mutex_unlock(&mMutex);
3999 goto error_exit;
4000 }
4001 }
4002
4003 if (mRawDumpChannel) {
4004 LOGD("Starting raw dump stream");
4005 rc = mRawDumpChannel->start();
4006 if (rc != NO_ERROR) {
4007 LOGE("Error Starting Raw Dump Channel");
4008 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4009 it != mStreamInfo.end(); it++) {
4010 QCamera3Channel *channel =
4011 (QCamera3Channel *)(*it)->stream->priv;
4012 LOGH("Stopping Processing Channel mask=%d",
4013 channel->getStreamTypeMask());
4014 channel->stop();
4015 }
4016 if (mSupportChannel)
4017 mSupportChannel->stop();
4018 if (mAnalysisChannel) {
4019 mAnalysisChannel->stop();
4020 }
4021 mMetadataChannel->stop();
4022 pthread_mutex_unlock(&mMutex);
4023 goto error_exit;
4024 }
4025 }
4026
4027 if (mChannelHandle) {
4028
4029 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4030 mChannelHandle);
4031 if (rc != NO_ERROR) {
4032 LOGE("start_channel failed %d", rc);
4033 pthread_mutex_unlock(&mMutex);
4034 goto error_exit;
4035 }
4036 }
4037
4038 goto no_error;
4039error_exit:
4040 m_perfLock.lock_rel();
4041 return rc;
4042no_error:
4043 m_perfLock.lock_rel();
4044
4045 mWokenUpByDaemon = false;
4046 mPendingLiveRequest = 0;
4047 mFirstConfiguration = false;
4048 enablePowerHint();
4049 }
4050
4051 uint32_t frameNumber = request->frame_number;
4052 cam_stream_ID_t streamID;
4053
4054 if (mFlushPerf) {
4055 //we cannot accept any requests during flush
4056 LOGE("process_capture_request cannot proceed during flush");
4057 pthread_mutex_unlock(&mMutex);
4058 return NO_ERROR; //should return an error
4059 }
4060
4061 if (meta.exists(ANDROID_REQUEST_ID)) {
4062 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4063 mCurrentRequestId = request_id;
4064 LOGD("Received request with id: %d", request_id);
4065 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4066 LOGE("Unable to find request id field, \
4067 & no previous id available");
4068 pthread_mutex_unlock(&mMutex);
4069 return NAME_NOT_FOUND;
4070 } else {
4071 LOGD("Re-using old request id");
4072 request_id = mCurrentRequestId;
4073 }
4074
4075 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4076 request->num_output_buffers,
4077 request->input_buffer,
4078 frameNumber);
4079 // Acquire all request buffers first
4080 streamID.num_streams = 0;
4081 int blob_request = 0;
4082 uint32_t snapshotStreamId = 0;
4083 for (size_t i = 0; i < request->num_output_buffers; i++) {
4084 const camera3_stream_buffer_t& output = request->output_buffers[i];
4085 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4086
4087 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4088 //Call function to store local copy of jpeg data for encode params.
4089 blob_request = 1;
4090 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4091 }
4092
4093 if (output.acquire_fence != -1) {
4094 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4095 close(output.acquire_fence);
4096 if (rc != OK) {
4097 LOGE("sync wait failed %d", rc);
4098 pthread_mutex_unlock(&mMutex);
4099 return rc;
4100 }
4101 }
4102
4103 streamID.streamID[streamID.num_streams] =
4104 channel->getStreamID(channel->getStreamTypeMask());
4105 streamID.num_streams++;
4106
4107 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4108 isVidBufRequested = true;
4109 }
4110 }
4111
4112 if (blob_request) {
4113 KPI_ATRACE_INT("SNAPSHOT", 1);
4114 }
4115 if (blob_request && mRawDumpChannel) {
4116 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
4117 streamID.streamID[streamID.num_streams] =
4118 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
4119 streamID.num_streams++;
4120 }
4121
4122 if(request->input_buffer == NULL) {
4123 /* Parse the settings:
4124 * - For every request in NORMAL MODE
4125 * - For every request in HFR mode during preview only case
4126 * - For first request of every batch in HFR mode during video
4127 * recording. In batchmode the same settings except frame number is
4128 * repeated in each request of the batch.
4129 */
4130 if (!mBatchSize ||
4131 (mBatchSize && !isVidBufRequested) ||
4132 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
4133 rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
4134 if (rc < 0) {
4135 LOGE("fail to set frame parameters");
4136 pthread_mutex_unlock(&mMutex);
4137 return rc;
4138 }
4139 }
4140 /* For batchMode HFR, setFrameParameters is not called for every
4141 * request. But only frame number of the latest request is parsed.
4142 * Keep track of first and last frame numbers in a batch so that
4143 * metadata for the frame numbers of batch can be duplicated in
4144 * handleBatchMetadta */
4145 if (mBatchSize) {
4146 if (!mToBeQueuedVidBufs) {
4147 //start of the batch
4148 mFirstFrameNumberInBatch = request->frame_number;
4149 }
4150 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4151 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4152 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004153 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004154 return BAD_VALUE;
4155 }
4156 }
4157 if (mNeedSensorRestart) {
4158 /* Unlock the mutex as restartSensor waits on the channels to be
4159 * stopped, which in turn calls stream callback functions -
4160 * handleBufferWithLock and handleMetadataWithLock */
4161 pthread_mutex_unlock(&mMutex);
4162 rc = dynamicUpdateMetaStreamInfo();
4163 if (rc != NO_ERROR) {
4164 LOGE("Restarting the sensor failed");
4165 return BAD_VALUE;
4166 }
4167 mNeedSensorRestart = false;
4168 pthread_mutex_lock(&mMutex);
4169 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004170 if(mResetInstantAEC) {
4171 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4172 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4173 mResetInstantAEC = false;
4174 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004175 } else {
4176
4177 if (request->input_buffer->acquire_fence != -1) {
4178 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4179 close(request->input_buffer->acquire_fence);
4180 if (rc != OK) {
4181 LOGE("input buffer sync wait failed %d", rc);
4182 pthread_mutex_unlock(&mMutex);
4183 return rc;
4184 }
4185 }
4186 }
4187
4188 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4189 mLastCustIntentFrmNum = frameNumber;
4190 }
4191 /* Update pending request list and pending buffers map */
4192 PendingRequestInfo pendingRequest;
4193 pendingRequestIterator latestRequest;
4194 pendingRequest.frame_number = frameNumber;
4195 pendingRequest.num_buffers = request->num_output_buffers;
4196 pendingRequest.request_id = request_id;
4197 pendingRequest.blob_request = blob_request;
4198 pendingRequest.timestamp = 0;
4199 pendingRequest.bUrgentReceived = 0;
4200 if (request->input_buffer) {
4201 pendingRequest.input_buffer =
4202 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4203 *(pendingRequest.input_buffer) = *(request->input_buffer);
4204 pInputBuffer = pendingRequest.input_buffer;
4205 } else {
4206 pendingRequest.input_buffer = NULL;
4207 pInputBuffer = NULL;
4208 }
4209
4210 pendingRequest.pipeline_depth = 0;
4211 pendingRequest.partial_result_cnt = 0;
4212 extractJpegMetadata(mCurJpegMeta, request);
4213 pendingRequest.jpegMetadata = mCurJpegMeta;
4214 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4215 pendingRequest.shutter_notified = false;
4216
4217 //extract capture intent
4218 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4219 mCaptureIntent =
4220 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4221 }
4222 pendingRequest.capture_intent = mCaptureIntent;
4223
4224 //extract CAC info
4225 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4226 mCacMode =
4227 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4228 }
4229 pendingRequest.fwkCacMode = mCacMode;
4230
4231 PendingBuffersInRequest bufsForCurRequest;
4232 bufsForCurRequest.frame_number = frameNumber;
4233 // Mark current timestamp for the new request
4234 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4235
4236 for (size_t i = 0; i < request->num_output_buffers; i++) {
4237 RequestedBufferInfo requestedBuf;
4238 memset(&requestedBuf, 0, sizeof(requestedBuf));
4239 requestedBuf.stream = request->output_buffers[i].stream;
4240 requestedBuf.buffer = NULL;
4241 pendingRequest.buffers.push_back(requestedBuf);
4242
4243 // Add to buffer handle the pending buffers list
4244 PendingBufferInfo bufferInfo;
4245 bufferInfo.buffer = request->output_buffers[i].buffer;
4246 bufferInfo.stream = request->output_buffers[i].stream;
4247 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4248 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4249 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4250 frameNumber, bufferInfo.buffer,
4251 channel->getStreamTypeMask(), bufferInfo.stream->format);
4252 }
4253 // Add this request packet into mPendingBuffersMap
4254 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4255 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4256 mPendingBuffersMap.get_num_overall_buffers());
4257
4258 latestRequest = mPendingRequestsList.insert(
4259 mPendingRequestsList.end(), pendingRequest);
4260 if(mFlush) {
4261 LOGI("mFlush is true");
4262 pthread_mutex_unlock(&mMutex);
4263 return NO_ERROR;
4264 }
4265
4266 // Notify metadata channel we receive a request
4267 mMetadataChannel->request(NULL, frameNumber);
4268
4269 if(request->input_buffer != NULL){
4270 LOGD("Input request, frame_number %d", frameNumber);
4271 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4272 if (NO_ERROR != rc) {
4273 LOGE("fail to set reproc parameters");
4274 pthread_mutex_unlock(&mMutex);
4275 return rc;
4276 }
4277 }
4278
4279 // Call request on other streams
4280 uint32_t streams_need_metadata = 0;
4281 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4282 for (size_t i = 0; i < request->num_output_buffers; i++) {
4283 const camera3_stream_buffer_t& output = request->output_buffers[i];
4284 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4285
4286 if (channel == NULL) {
4287 LOGW("invalid channel pointer for stream");
4288 continue;
4289 }
4290
4291 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4292 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4293 output.buffer, request->input_buffer, frameNumber);
4294 if(request->input_buffer != NULL){
4295 rc = channel->request(output.buffer, frameNumber,
4296 pInputBuffer, &mReprocMeta);
4297 if (rc < 0) {
4298 LOGE("Fail to request on picture channel");
4299 pthread_mutex_unlock(&mMutex);
4300 return rc;
4301 }
4302 } else {
4303 LOGD("snapshot request with buffer %p, frame_number %d",
4304 output.buffer, frameNumber);
4305 if (!request->settings) {
4306 rc = channel->request(output.buffer, frameNumber,
4307 NULL, mPrevParameters);
4308 } else {
4309 rc = channel->request(output.buffer, frameNumber,
4310 NULL, mParameters);
4311 }
4312 if (rc < 0) {
4313 LOGE("Fail to request on picture channel");
4314 pthread_mutex_unlock(&mMutex);
4315 return rc;
4316 }
4317 pendingBufferIter->need_metadata = true;
4318 streams_need_metadata++;
4319 }
4320 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4321 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004322 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4323 rc = yuvChannel->request(output.buffer, frameNumber,
4324 pInputBuffer,
4325 (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
4326 if (rc < 0) {
4327 LOGE("Fail to request on YUV channel");
4328 pthread_mutex_unlock(&mMutex);
4329 return rc;
4330 }
4331 pendingBufferIter->need_metadata = needMetadata;
4332 if (needMetadata)
4333 streams_need_metadata += 1;
4334 LOGD("calling YUV channel request, need_metadata is %d",
4335 needMetadata);
4336 } else {
4337 LOGD("request with buffer %p, frame_number %d",
4338 output.buffer, frameNumber);
Thierry Strudel3d639192016-09-09 11:52:26 -07004339 rc = channel->request(output.buffer, frameNumber);
4340 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4341 && mBatchSize) {
4342 mToBeQueuedVidBufs++;
4343 if (mToBeQueuedVidBufs == mBatchSize) {
4344 channel->queueBatchBuf();
4345 }
4346 }
4347 if (rc < 0) {
4348 LOGE("request failed");
4349 pthread_mutex_unlock(&mMutex);
4350 return rc;
4351 }
4352 }
4353 pendingBufferIter++;
4354 }
4355
4356 //If 2 streams have need_metadata set to true, fail the request, unless
4357 //we copy/reference count the metadata buffer
4358 if (streams_need_metadata > 1) {
4359 LOGE("not supporting request in which two streams requires"
4360 " 2 HAL metadata for reprocessing");
4361 pthread_mutex_unlock(&mMutex);
4362 return -EINVAL;
4363 }
4364
4365 if(request->input_buffer == NULL) {
4366 /* Set the parameters to backend:
4367 * - For every request in NORMAL MODE
4368 * - For every request in HFR mode during preview only case
4369 * - Once every batch in HFR mode during video recording
4370 */
4371 if (!mBatchSize ||
4372 (mBatchSize && !isVidBufRequested) ||
4373 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4374 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4375 mBatchSize, isVidBufRequested,
4376 mToBeQueuedVidBufs);
4377 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4378 mParameters);
4379 if (rc < 0) {
4380 LOGE("set_parms failed");
4381 }
4382 /* reset to zero coz, the batch is queued */
4383 mToBeQueuedVidBufs = 0;
4384 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
4385 }
4386 mPendingLiveRequest++;
4387 }
4388
4389 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4390
4391 mState = STARTED;
4392 // Added a timed condition wait
4393 struct timespec ts;
4394 uint8_t isValidTimeout = 1;
4395 rc = clock_gettime(CLOCK_REALTIME, &ts);
4396 if (rc < 0) {
4397 isValidTimeout = 0;
4398 LOGE("Error reading the real time clock!!");
4399 }
4400 else {
4401 // Make timeout as 5 sec for request to be honored
4402 ts.tv_sec += 5;
4403 }
4404 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004405 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07004406 (mState != ERROR) && (mState != DEINIT)) {
4407 if (!isValidTimeout) {
4408 LOGD("Blocking on conditional wait");
4409 pthread_cond_wait(&mRequestCond, &mMutex);
4410 }
4411 else {
4412 LOGD("Blocking on timed conditional wait");
4413 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
4414 if (rc == ETIMEDOUT) {
4415 rc = -ENODEV;
4416 LOGE("Unblocked on timeout!!!!");
4417 break;
4418 }
4419 }
4420 LOGD("Unblocked");
4421 if (mWokenUpByDaemon) {
4422 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004423 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07004424 break;
4425 }
4426 }
4427 pthread_mutex_unlock(&mMutex);
4428
4429 return rc;
4430}
4431
4432/*===========================================================================
4433 * FUNCTION : dump
4434 *
 * DESCRIPTION: Dump HAL3 debug state (pending capture requests, pending
 *              buffers per request, and the pending frame drop list) to the
 *              given file descriptor. Triggered via dumpsys.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the textual dump into
 *
 * RETURN     : None
4441 *==========================================================================*/
4442void QCamera3HardwareInterface::dump(int fd)
4443{
4444 pthread_mutex_lock(&mMutex);
4445 dprintf(fd, "\n Camera HAL3 information Begin \n");
4446
4447 dprintf(fd, "\nNumber of pending requests: %zu \n",
4448 mPendingRequestsList.size());
4449 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4450 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
4451 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4452 for(pendingRequestIterator i = mPendingRequestsList.begin();
4453 i != mPendingRequestsList.end(); i++) {
4454 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4455 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4456 i->input_buffer);
4457 }
4458 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4459 mPendingBuffersMap.get_num_overall_buffers());
4460 dprintf(fd, "-------+------------------\n");
4461 dprintf(fd, " Frame | Stream type mask \n");
4462 dprintf(fd, "-------+------------------\n");
4463 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4464 for(auto &j : req.mPendingBufferList) {
4465 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4466 dprintf(fd, " %5d | %11d \n",
4467 req.frame_number, channel->getStreamTypeMask());
4468 }
4469 }
4470 dprintf(fd, "-------+------------------\n");
4471
4472 dprintf(fd, "\nPending frame drop list: %zu\n",
4473 mPendingFrameDropList.size());
4474 dprintf(fd, "-------+-----------\n");
4475 dprintf(fd, " Frame | Stream ID \n");
4476 dprintf(fd, "-------+-----------\n");
4477 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4478 i != mPendingFrameDropList.end(); i++) {
4479 dprintf(fd, " %5d | %9d \n",
4480 i->frame_number, i->stream_ID);
4481 }
4482 dprintf(fd, "-------+-----------\n");
4483
4484 dprintf(fd, "\n Camera HAL3 information End \n");
4485
4486 /* use dumpsys media.camera as trigger to send update debug level event */
4487 mUpdateDebugLevel = true;
4488 pthread_mutex_unlock(&mMutex);
4489 return;
4490}
4491
4492/*===========================================================================
4493 * FUNCTION : flush
4494 *
4495 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4496 * conditionally restarts channels
4497 *
4498 * PARAMETERS :
4499 * @ restartChannels: re-start all channels
4500 *
4501 *
4502 * RETURN :
4503 * 0 on success
4504 * Error code on failure
4505 *==========================================================================*/
4506int QCamera3HardwareInterface::flush(bool restartChannels)
4507{
4508 KPI_ATRACE_CALL();
4509 int32_t rc = NO_ERROR;
4510
4511 LOGD("Unblocking Process Capture Request");
4512 pthread_mutex_lock(&mMutex);
4513 mFlush = true;
4514 pthread_mutex_unlock(&mMutex);
4515
4516 rc = stopAllChannels();
4517 // unlink of dualcam
4518 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004519 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4520 &m_pDualCamCmdPtr->bundle_info;
4521 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004522 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4523 pthread_mutex_lock(&gCamLock);
4524
4525 if (mIsMainCamera == 1) {
4526 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4527 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004528 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07004529 // related session id should be session id of linked session
4530 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4531 } else {
4532 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4533 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004534 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07004535 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4536 }
4537 pthread_mutex_unlock(&gCamLock);
4538
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004539 rc = mCameraHandle->ops->set_dual_cam_cmd(
4540 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004541 if (rc < 0) {
4542 LOGE("Dualcam: Unlink failed, but still proceed to close");
4543 }
4544 }
4545
4546 if (rc < 0) {
4547 LOGE("stopAllChannels failed");
4548 return rc;
4549 }
4550 if (mChannelHandle) {
4551 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4552 mChannelHandle);
4553 }
4554
4555 // Reset bundle info
4556 rc = setBundleInfo();
4557 if (rc < 0) {
4558 LOGE("setBundleInfo failed %d", rc);
4559 return rc;
4560 }
4561
4562 // Mutex Lock
4563 pthread_mutex_lock(&mMutex);
4564
4565 // Unblock process_capture_request
4566 mPendingLiveRequest = 0;
4567 pthread_cond_signal(&mRequestCond);
4568
4569 rc = notifyErrorForPendingRequests();
4570 if (rc < 0) {
4571 LOGE("notifyErrorForPendingRequests failed");
4572 pthread_mutex_unlock(&mMutex);
4573 return rc;
4574 }
4575
4576 mFlush = false;
4577
4578 // Start the Streams/Channels
4579 if (restartChannels) {
4580 rc = startAllChannels();
4581 if (rc < 0) {
4582 LOGE("startAllChannels failed");
4583 pthread_mutex_unlock(&mMutex);
4584 return rc;
4585 }
4586 }
4587
4588 if (mChannelHandle) {
4589 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4590 mChannelHandle);
4591 if (rc < 0) {
4592 LOGE("start_channel failed");
4593 pthread_mutex_unlock(&mMutex);
4594 return rc;
4595 }
4596 }
4597
4598 pthread_mutex_unlock(&mMutex);
4599
4600 return 0;
4601}
4602
4603/*===========================================================================
4604 * FUNCTION : flushPerf
4605 *
4606 * DESCRIPTION: This is the performance optimization version of flush that does
4607 * not use stream off, rather flushes the system
4608 *
4609 * PARAMETERS :
4610 *
4611 *
4612 * RETURN : 0 : success
4613 * -EINVAL: input is malformed (device is not valid)
4614 * -ENODEV: if the device has encountered a serious error
4615 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the number of in-flight buffers; the buffer-return path is
    // expected to decrement this and signal mBuffersCond when it hits zero.
    mPendingBuffersMap.numPendingBufsAtFlush =
        mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
        mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        // Backend flush failed: clear the in-progress flag and report a
        // serious device error to the caller.
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing in flight, so the flush is trivially complete.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Prefer a timed wait (FLUSH_TIMEOUT seconds); fall back to an untimed
    // wait only if the realtime clock cannot be read.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                // ETIMEDOUT (or any other error) abandons the wait loop.
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out before all buffers returned.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4716
4717/*===========================================================================
4718 * FUNCTION : handleCameraDeviceError
4719 *
4720 * DESCRIPTION: This function calls internal flush and notifies the error to
4721 * framework and updates the state variable.
4722 *
4723 * PARAMETERS : None
4724 *
4725 * RETURN : NO_ERROR on Success
4726 * Error code on failure
4727 *==========================================================================*/
4728int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4729{
4730 int32_t rc = NO_ERROR;
4731
4732 pthread_mutex_lock(&mMutex);
4733 if (mState != ERROR) {
4734 //if mState != ERROR, nothing to be done
4735 pthread_mutex_unlock(&mMutex);
4736 return NO_ERROR;
4737 }
4738 pthread_mutex_unlock(&mMutex);
4739
4740 rc = flush(false /* restart channels */);
4741 if (NO_ERROR != rc) {
4742 LOGE("internal flush to handle mState = ERROR failed");
4743 }
4744
4745 pthread_mutex_lock(&mMutex);
4746 mState = DEINIT;
4747 pthread_mutex_unlock(&mMutex);
4748
4749 camera3_notify_msg_t notify_msg;
4750 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4751 notify_msg.type = CAMERA3_MSG_ERROR;
4752 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4753 notify_msg.message.error.error_stream = NULL;
4754 notify_msg.message.error.frame_number = 0;
4755 mCallbackOps->notify(mCallbackOps, &notify_msg);
4756
4757 return rc;
4758}
4759
4760/*===========================================================================
4761 * FUNCTION : captureResultCb
4762 *
4763 * DESCRIPTION: Callback handler for all capture result
4764 * (streams, as well as metadata)
4765 *
4766 * PARAMETERS :
4767 * @metadata : metadata information
4768 * @buffer : actual gralloc buffer to be returned to frameworks.
4769 * NULL if metadata.
4770 *
4771 * RETURN : NONE
4772 *==========================================================================*/
4773void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4774 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4775{
4776 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004777 pthread_mutex_lock(&mMutex);
4778 uint8_t batchSize = mBatchSize;
4779 pthread_mutex_unlock(&mMutex);
4780 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004781 handleBatchMetadata(metadata_buf,
4782 true /* free_and_bufdone_meta_buf */);
4783 } else { /* mBatchSize = 0 */
4784 hdrPlusPerfLock(metadata_buf);
4785 pthread_mutex_lock(&mMutex);
4786 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004787 true /* free_and_bufdone_meta_buf */,
4788 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07004789 pthread_mutex_unlock(&mMutex);
4790 }
4791 } else if (isInputBuffer) {
4792 pthread_mutex_lock(&mMutex);
4793 handleInputBufferWithLock(frame_number);
4794 pthread_mutex_unlock(&mMutex);
4795 } else {
4796 pthread_mutex_lock(&mMutex);
4797 handleBufferWithLock(buffer, frame_number);
4798 pthread_mutex_unlock(&mMutex);
4799 }
4800 return;
4801}
4802
4803/*===========================================================================
4804 * FUNCTION : getReprocessibleOutputStreamId
4805 *
4806 * DESCRIPTION: Get source output stream id for the input reprocess stream
4807 * based on size and format, which would be the largest
4808 * output stream if an input stream exists.
4809 *
4810 * PARAMETERS :
4811 * @id : return the stream id if found
4812 *
4813 * RETURN : int32_t type of status
4814 * NO_ERROR -- success
4815 * none-zero failure code
4816 *==========================================================================*/
4817int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4818{
4819 /* check if any output or bidirectional stream with the same size and format
4820 and return that stream */
4821 if ((mInputStreamInfo.dim.width > 0) &&
4822 (mInputStreamInfo.dim.height > 0)) {
4823 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4824 it != mStreamInfo.end(); it++) {
4825
4826 camera3_stream_t *stream = (*it)->stream;
4827 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4828 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4829 (stream->format == mInputStreamInfo.format)) {
4830 // Usage flag for an input stream and the source output stream
4831 // may be different.
4832 LOGD("Found reprocessible output stream! %p", *it);
4833 LOGD("input stream usage 0x%x, current stream usage 0x%x",
4834 stream->usage, mInputStreamInfo.usage);
4835
4836 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4837 if (channel != NULL && channel->mStreams[0]) {
4838 id = channel->mStreams[0]->getMyServerID();
4839 return NO_ERROR;
4840 }
4841 }
4842 }
4843 } else {
4844 LOGD("No input stream, so no reprocessible output stream");
4845 }
4846 return NAME_NOT_FOUND;
4847}
4848
4849/*===========================================================================
4850 * FUNCTION : lookupFwkName
4851 *
4852 * DESCRIPTION: In case the enum is not same in fwk and backend
4853 * make sure the parameter is correctly propogated
4854 *
4855 * PARAMETERS :
4856 * @arr : map between the two enums
4857 * @len : len of the map
4858 * @hal_name : name of the hal_parm to map
4859 *
4860 * RETURN : int type of status
4861 * fwk_name -- success
4862 * none-zero failure code
4863 *==========================================================================*/
4864template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4865 size_t len, halType hal_name)
4866{
4867
4868 for (size_t i = 0; i < len; i++) {
4869 if (arr[i].hal_name == hal_name) {
4870 return arr[i].fwk_name;
4871 }
4872 }
4873
4874 /* Not able to find matching framework type is not necessarily
4875 * an error case. This happens when mm-camera supports more attributes
4876 * than the frameworks do */
4877 LOGH("Cannot find matching framework type");
4878 return NAME_NOT_FOUND;
4879}
4880
4881/*===========================================================================
4882 * FUNCTION : lookupHalName
4883 *
4884 * DESCRIPTION: In case the enum is not same in fwk and backend
4885 * make sure the parameter is correctly propogated
4886 *
4887 * PARAMETERS :
4888 * @arr : map between the two enums
4889 * @len : len of the map
4890 * @fwk_name : name of the hal_parm to map
4891 *
4892 * RETURN : int32_t type of status
4893 * hal_name -- success
4894 * none-zero failure code
4895 *==========================================================================*/
4896template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4897 size_t len, fwkType fwk_name)
4898{
4899 for (size_t i = 0; i < len; i++) {
4900 if (arr[i].fwk_name == fwk_name) {
4901 return arr[i].hal_name;
4902 }
4903 }
4904
4905 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4906 return NAME_NOT_FOUND;
4907}
4908
4909/*===========================================================================
4910 * FUNCTION : lookupProp
4911 *
4912 * DESCRIPTION: lookup a value by its name
4913 *
4914 * PARAMETERS :
4915 * @arr : map between the two enums
4916 * @len : size of the map
4917 * @name : name to be looked up
4918 *
4919 * RETURN : Value if found
4920 * CAM_CDS_MODE_MAX if not found
4921 *==========================================================================*/
4922template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4923 size_t len, const char *name)
4924{
4925 if (name) {
4926 for (size_t i = 0; i < len; i++) {
4927 if (!strcmp(arr[i].desc, name)) {
4928 return arr[i].val;
4929 }
4930 }
4931 }
4932 return CAM_CDS_MODE_MAX;
4933}
4934
4935/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate a metadata buffer received from the HAL backend into
 *              the framework camera_metadata_t format.
4938 *
4939 * PARAMETERS :
4940 * @metadata : metadata information from callback
4941 * @timestamp: metadata buffer timestamp
4942 * @request_id: request id
4943 * @jpegMetadata: additional jpeg metadata
4944 * @pprocDone: whether internal offline postprocsesing is done
4945 *
4946 * RETURN : camera_metadata_t*
4947 * metadata in a format specified by fwk
4948 *==========================================================================*/
4949camera_metadata_t*
4950QCamera3HardwareInterface::translateFromHalMetadata(
4951 metadata_buffer_t *metadata,
4952 nsecs_t timestamp,
4953 int32_t request_id,
4954 const CameraMetadata& jpegMetadata,
4955 uint8_t pipeline_depth,
4956 uint8_t capture_intent,
4957 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 uint8_t fwk_cacMode,
4959 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07004960{
4961 CameraMetadata camMetadata;
4962 camera_metadata_t *resultMetadata;
4963
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004964 if (mBatchSize && !firstMetadataInBatch) {
4965 /* In batch mode, use cached metadata from the first metadata
4966 in the batch */
4967 camMetadata.clear();
4968 camMetadata = mCachedMetadata;
4969 }
4970
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 if (jpegMetadata.entryCount())
4972 camMetadata.append(jpegMetadata);
4973
4974 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4975 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4976 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4977 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4978
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004979 if (mBatchSize && !firstMetadataInBatch) {
4980 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4981 resultMetadata = camMetadata.release();
4982 return resultMetadata;
4983 }
4984
Thierry Strudel3d639192016-09-09 11:52:26 -07004985 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4986 int64_t fwk_frame_number = *frame_number;
4987 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4988 }
4989
4990 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4991 int32_t fps_range[2];
4992 fps_range[0] = (int32_t)float_range->min_fps;
4993 fps_range[1] = (int32_t)float_range->max_fps;
4994 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4995 fps_range, 2);
4996 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4997 fps_range[0], fps_range[1]);
4998 }
4999
5000 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5001 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5002 }
5003
5004 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5005 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5006 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5007 *sceneMode);
5008 if (NAME_NOT_FOUND != val) {
5009 uint8_t fwkSceneMode = (uint8_t)val;
5010 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5011 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5012 fwkSceneMode);
5013 }
5014 }
5015
5016 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5017 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5018 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5019 }
5020
5021 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5022 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5023 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5024 }
5025
5026 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5027 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5028 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5029 }
5030
5031 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5032 CAM_INTF_META_EDGE_MODE, metadata) {
5033 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5034 }
5035
5036 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5037 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5038 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5039 }
5040
5041 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5042 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5043 }
5044
5045 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5046 if (0 <= *flashState) {
5047 uint8_t fwk_flashState = (uint8_t) *flashState;
5048 if (!gCamCapability[mCameraId]->flash_available) {
5049 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5050 }
5051 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5052 }
5053 }
5054
5055 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5056 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5057 if (NAME_NOT_FOUND != val) {
5058 uint8_t fwk_flashMode = (uint8_t)val;
5059 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5060 }
5061 }
5062
5063 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5064 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5065 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5066 }
5067
5068 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5069 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5070 }
5071
5072 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5073 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5074 }
5075
5076 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5077 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5078 }
5079
5080 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5081 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5082 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5083 }
5084
5085 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5086 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5087 LOGD("fwk_videoStab = %d", fwk_videoStab);
5088 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5089 } else {
5090 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5091 // and so hardcoding the Video Stab result to OFF mode.
5092 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5093 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005094 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 }
5096
5097 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5098 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5099 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5100 }
5101
5102 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5103 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5104 }
5105
5106 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
5107 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
5108
5109 LOGD("dynamicblackLevel = %f %f %f %f",
5110 blackLevelSourcePattern->cam_black_level[0],
5111 blackLevelSourcePattern->cam_black_level[1],
5112 blackLevelSourcePattern->cam_black_level[2],
5113 blackLevelSourcePattern->cam_black_level[3]);
5114 }
5115
5116 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5117 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5118 float fwk_blackLevelInd[4];
5119
5120 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
5121 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
5122 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
5123 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
5124
5125 LOGD("applied dynamicblackLevel = %f %f %f %f",
5126 blackLevelAppliedPattern->cam_black_level[0],
5127 blackLevelAppliedPattern->cam_black_level[1],
5128 blackLevelAppliedPattern->cam_black_level[2],
5129 blackLevelAppliedPattern->cam_black_level[3]);
5130 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005131
5132#ifndef USE_HAL_3_3
5133 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
5134 // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
5135 // depth space.
5136 fwk_blackLevelInd[0] /= 64.0;
5137 fwk_blackLevelInd[1] /= 64.0;
5138 fwk_blackLevelInd[2] /= 64.0;
5139 fwk_blackLevelInd[3] /= 64.0;
5140 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
5141#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 }
5143
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005144#ifndef USE_HAL_3_3
5145 // Fixed whitelevel is used by ISP/Sensor
5146 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5147 &gCamCapability[mCameraId]->white_level, 1);
5148#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07005149
5150 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5151 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5152 int32_t scalerCropRegion[4];
5153 scalerCropRegion[0] = hScalerCropRegion->left;
5154 scalerCropRegion[1] = hScalerCropRegion->top;
5155 scalerCropRegion[2] = hScalerCropRegion->width;
5156 scalerCropRegion[3] = hScalerCropRegion->height;
5157
5158 // Adjust crop region from sensor output coordinate system to active
5159 // array coordinate system.
5160 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5161 scalerCropRegion[2], scalerCropRegion[3]);
5162
5163 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5164 }
5165
5166 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5167 LOGD("sensorExpTime = %lld", *sensorExpTime);
5168 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5169 }
5170
5171 IF_META_AVAILABLE(int64_t, sensorFameDuration,
5172 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5173 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5174 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5175 }
5176
5177 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5178 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5179 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5180 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5181 sensorRollingShutterSkew, 1);
5182 }
5183
5184 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5185 LOGD("sensorSensitivity = %d", *sensorSensitivity);
5186 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5187
5188 //calculate the noise profile based on sensitivity
5189 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5190 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5191 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5192 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5193 noise_profile[i] = noise_profile_S;
5194 noise_profile[i+1] = noise_profile_O;
5195 }
5196 LOGD("noise model entry (S, O) is (%f, %f)",
5197 noise_profile_S, noise_profile_O);
5198 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5199 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5200 }
5201
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005202#ifndef USE_HAL_3_3
5203 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5204 int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
5205 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5206 }
5207#endif
5208
Thierry Strudel3d639192016-09-09 11:52:26 -07005209 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5210 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5211 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5212 }
5213
5214 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5215 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5216 *faceDetectMode);
5217 if (NAME_NOT_FOUND != val) {
5218 uint8_t fwk_faceDetectMode = (uint8_t)val;
5219 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5220
5221 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5222 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5223 CAM_INTF_META_FACE_DETECTION, metadata) {
5224 uint8_t numFaces = MIN(
5225 faceDetectionInfo->num_faces_detected, MAX_ROI);
5226 int32_t faceIds[MAX_ROI];
5227 uint8_t faceScores[MAX_ROI];
5228 int32_t faceRectangles[MAX_ROI * 4];
5229 int32_t faceLandmarks[MAX_ROI * 6];
5230 size_t j = 0, k = 0;
5231
5232 for (size_t i = 0; i < numFaces; i++) {
5233 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5234 // Adjust crop region from sensor output coordinate system to active
5235 // array coordinate system.
5236 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5237 mCropRegionMapper.toActiveArray(rect.left, rect.top,
5238 rect.width, rect.height);
5239
5240 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5241 faceRectangles+j, -1);
5242
5243 j+= 4;
5244 }
5245 if (numFaces <= 0) {
5246 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5247 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5248 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5249 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5250 }
5251
5252 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5253 numFaces);
5254 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5255 faceRectangles, numFaces * 4U);
5256 if (fwk_faceDetectMode ==
5257 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5258 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5259 CAM_INTF_META_FACE_LANDMARK, metadata) {
5260
5261 for (size_t i = 0; i < numFaces; i++) {
5262 // Map the co-ordinate sensor output coordinate system to active
5263 // array coordinate system.
5264 mCropRegionMapper.toActiveArray(
5265 landmarks->face_landmarks[i].left_eye_center.x,
5266 landmarks->face_landmarks[i].left_eye_center.y);
5267 mCropRegionMapper.toActiveArray(
5268 landmarks->face_landmarks[i].right_eye_center.x,
5269 landmarks->face_landmarks[i].right_eye_center.y);
5270 mCropRegionMapper.toActiveArray(
5271 landmarks->face_landmarks[i].mouth_center.x,
5272 landmarks->face_landmarks[i].mouth_center.y);
5273
5274 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07005275 k+= TOTAL_LANDMARK_INDICES;
5276 }
5277 } else {
5278 for (size_t i = 0; i < numFaces; i++) {
5279 setInvalidLandmarks(faceLandmarks+k);
5280 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07005281 }
5282 }
5283
5284 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5285 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5286 faceLandmarks, numFaces * 6U);
5287 }
5288 }
5289 }
5290 }
5291 }
5292
5293 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5294 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5295 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005296
5297 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
5298 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
5299 // process histogram statistics info
5300 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
5301 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
5302 cam_histogram_data_t rHistData, gHistData, bHistData;
5303 memset(&rHistData, 0, sizeof(rHistData));
5304 memset(&gHistData, 0, sizeof(gHistData));
5305 memset(&bHistData, 0, sizeof(bHistData));
5306
5307 switch (stats_data->type) {
5308 case CAM_HISTOGRAM_TYPE_BAYER:
5309 switch (stats_data->bayer_stats.data_type) {
5310 case CAM_STATS_CHANNEL_GR:
5311 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
5312 break;
5313 case CAM_STATS_CHANNEL_GB:
5314 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
5315 break;
5316 case CAM_STATS_CHANNEL_B:
5317 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
5318 break;
5319 case CAM_STATS_CHANNEL_ALL:
5320 rHistData = stats_data->bayer_stats.r_stats;
5321 //Framework expects only 3 channels. So, for now,
5322 //use gb stats for G channel.
5323 gHistData = stats_data->bayer_stats.gb_stats;
5324 bHistData = stats_data->bayer_stats.b_stats;
5325 break;
5326 case CAM_STATS_CHANNEL_Y:
5327 case CAM_STATS_CHANNEL_R:
5328 default:
5329 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
5330 break;
5331 }
5332 break;
5333 case CAM_HISTOGRAM_TYPE_YUV:
5334 rHistData = gHistData = bHistData = stats_data->yuv_stats;
5335 break;
5336 }
5337
5338 memcpy(hist_buf, rHistData.hist_buf, hist_size);
5339 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
5340 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
5341
5342 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
5343 }
5344 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005345 }
5346
5347 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5348 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5349 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5350 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5351 }
5352
5353 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5354 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5355 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5356 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5357 }
5358
5359 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5360 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5361 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5362 CAM_MAX_SHADING_MAP_HEIGHT);
5363 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5364 CAM_MAX_SHADING_MAP_WIDTH);
5365 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5366 lensShadingMap->lens_shading, 4U * map_width * map_height);
5367 }
5368
5369 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5370 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5371 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5372 }
5373
5374 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5375 //Populate CAM_INTF_META_TONEMAP_CURVES
5376 /* ch0 = G, ch 1 = B, ch 2 = R*/
5377 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5378 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5379 tonemap->tonemap_points_cnt,
5380 CAM_MAX_TONEMAP_CURVE_SIZE);
5381 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5382 }
5383
5384 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5385 &tonemap->curves[0].tonemap_points[0][0],
5386 tonemap->tonemap_points_cnt * 2);
5387
5388 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5389 &tonemap->curves[1].tonemap_points[0][0],
5390 tonemap->tonemap_points_cnt * 2);
5391
5392 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5393 &tonemap->curves[2].tonemap_points[0][0],
5394 tonemap->tonemap_points_cnt * 2);
5395 }
5396
5397 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5398 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5399 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5400 CC_GAIN_MAX);
5401 }
5402
5403 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5404 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5405 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5406 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5407 CC_MATRIX_COLS * CC_MATRIX_ROWS);
5408 }
5409
5410 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5411 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5412 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5413 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5414 toneCurve->tonemap_points_cnt,
5415 CAM_MAX_TONEMAP_CURVE_SIZE);
5416 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5417 }
5418 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5419 (float*)toneCurve->curve.tonemap_points,
5420 toneCurve->tonemap_points_cnt * 2);
5421 }
5422
5423 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5424 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5425 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5426 predColorCorrectionGains->gains, 4);
5427 }
5428
5429 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5430 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5431 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5432 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5433 CC_MATRIX_ROWS * CC_MATRIX_COLS);
5434 }
5435
5436 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5437 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5438 }
5439
5440 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5441 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5442 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5443 }
5444
5445 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5446 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5447 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5448 }
5449
5450 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5451 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5452 *effectMode);
5453 if (NAME_NOT_FOUND != val) {
5454 uint8_t fwk_effectMode = (uint8_t)val;
5455 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5456 }
5457 }
5458
5459 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5460 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5461 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5462 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5463 if (NAME_NOT_FOUND != fwk_testPatternMode) {
5464 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5465 }
5466 int32_t fwk_testPatternData[4];
5467 fwk_testPatternData[0] = testPatternData->r;
5468 fwk_testPatternData[3] = testPatternData->b;
5469 switch (gCamCapability[mCameraId]->color_arrangement) {
5470 case CAM_FILTER_ARRANGEMENT_RGGB:
5471 case CAM_FILTER_ARRANGEMENT_GRBG:
5472 fwk_testPatternData[1] = testPatternData->gr;
5473 fwk_testPatternData[2] = testPatternData->gb;
5474 break;
5475 case CAM_FILTER_ARRANGEMENT_GBRG:
5476 case CAM_FILTER_ARRANGEMENT_BGGR:
5477 fwk_testPatternData[2] = testPatternData->gr;
5478 fwk_testPatternData[1] = testPatternData->gb;
5479 break;
5480 default:
5481 LOGE("color arrangement %d is not supported",
5482 gCamCapability[mCameraId]->color_arrangement);
5483 break;
5484 }
5485 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5486 }
5487
5488 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5489 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5490 }
5491
5492 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5493 String8 str((const char *)gps_methods);
5494 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5495 }
5496
5497 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5498 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5499 }
5500
5501 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5502 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5503 }
5504
5505 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5506 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5507 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5508 }
5509
5510 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5511 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5512 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5513 }
5514
5515 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5516 int32_t fwk_thumb_size[2];
5517 fwk_thumb_size[0] = thumb_size->width;
5518 fwk_thumb_size[1] = thumb_size->height;
5519 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5520 }
5521
5522 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5523 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5524 privateData,
5525 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5526 }
5527
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005528 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
5529 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
5530 meteringMode, 1);
5531 }
5532
Thierry Strudel3d639192016-09-09 11:52:26 -07005533 if (metadata->is_tuning_params_valid) {
5534 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5535 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5536 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5537
5538
5539 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5540 sizeof(uint32_t));
5541 data += sizeof(uint32_t);
5542
5543 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5544 sizeof(uint32_t));
5545 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5546 data += sizeof(uint32_t);
5547
5548 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5549 sizeof(uint32_t));
5550 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5551 data += sizeof(uint32_t);
5552
5553 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5554 sizeof(uint32_t));
5555 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5556 data += sizeof(uint32_t);
5557
5558 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5559 sizeof(uint32_t));
5560 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5561 data += sizeof(uint32_t);
5562
5563 metadata->tuning_params.tuning_mod3_data_size = 0;
5564 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5565 sizeof(uint32_t));
5566 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5567 data += sizeof(uint32_t);
5568
5569 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5570 TUNING_SENSOR_DATA_MAX);
5571 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5572 count);
5573 data += count;
5574
5575 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5576 TUNING_VFE_DATA_MAX);
5577 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5578 count);
5579 data += count;
5580
5581 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5582 TUNING_CPP_DATA_MAX);
5583 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5584 count);
5585 data += count;
5586
5587 count = MIN(metadata->tuning_params.tuning_cac_data_size,
5588 TUNING_CAC_DATA_MAX);
5589 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5590 count);
5591 data += count;
5592
5593 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5594 (int32_t *)(void *)tuning_meta_data_blob,
5595 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5596 }
5597
5598 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5599 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5600 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5601 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5602 NEUTRAL_COL_POINTS);
5603 }
5604
5605 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5606 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5607 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5608 }
5609
5610 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5611 int32_t aeRegions[REGIONS_TUPLE_COUNT];
5612 // Adjust crop region from sensor output coordinate system to active
5613 // array coordinate system.
5614 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5615 hAeRegions->rect.width, hAeRegions->rect.height);
5616
5617 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5618 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5619 REGIONS_TUPLE_COUNT);
5620 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5621 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5622 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5623 hAeRegions->rect.height);
5624 }
5625
5626 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5627 uint8_t fwk_afState = (uint8_t) *afState;
5628 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5629 LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5630 }
5631
5632 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5633 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5634 }
5635
5636 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5637 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5638 }
5639
5640 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5641 uint8_t fwk_lensState = *lensState;
5642 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5643 }
5644
5645 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5646 /*af regions*/
5647 int32_t afRegions[REGIONS_TUPLE_COUNT];
5648 // Adjust crop region from sensor output coordinate system to active
5649 // array coordinate system.
5650 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5651 hAfRegions->rect.width, hAfRegions->rect.height);
5652
5653 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5654 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5655 REGIONS_TUPLE_COUNT);
5656 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5657 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5658 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5659 hAfRegions->rect.height);
5660 }
5661
5662 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5663 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5664 *hal_ab_mode);
5665 if (NAME_NOT_FOUND != val) {
5666 uint8_t fwk_ab_mode = (uint8_t)val;
5667 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5668 }
5669 }
5670
5671 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5672 int val = lookupFwkName(SCENE_MODES_MAP,
5673 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5674 if (NAME_NOT_FOUND != val) {
5675 uint8_t fwkBestshotMode = (uint8_t)val;
5676 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5677 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5678 } else {
5679 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5680 }
5681 }
5682
5683 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5684 uint8_t fwk_mode = (uint8_t) *mode;
5685 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5686 }
5687
5688 /* Constant metadata values to be update*/
5689 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5690 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5691
5692 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5693 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5694
5695 int32_t hotPixelMap[2];
5696 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5697
5698 // CDS
5699 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5700 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5701 }
5702
Thierry Strudel04e026f2016-10-10 11:27:36 -07005703 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
5704 int32_t fwk_hdr;
5705 if(*vhdr == CAM_SENSOR_HDR_OFF) {
5706 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
5707 } else {
5708 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
5709 }
5710 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
5711 }
5712
5713 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005714 int32_t fwk_ir = (int32_t) *ir;
5715 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07005716 }
5717
Thierry Strudel269c81a2016-10-12 12:13:59 -07005718 // AEC SPEED
5719 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
5720 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
5721 }
5722
5723 // AWB SPEED
5724 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
5725 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
5726 }
5727
Thierry Strudel3d639192016-09-09 11:52:26 -07005728 // TNR
5729 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5730 uint8_t tnr_enable = tnr->denoise_enable;
5731 int32_t tnr_process_type = (int32_t)tnr->process_plates;
5732
5733 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5734 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5735 }
5736
5737 // Reprocess crop data
5738 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5739 uint8_t cnt = crop_data->num_of_streams;
5740 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5741 // mm-qcamera-daemon only posts crop_data for streams
5742 // not linked to pproc. So no valid crop metadata is not
5743 // necessarily an error case.
5744 LOGD("No valid crop metadata entries");
5745 } else {
5746 uint32_t reproc_stream_id;
5747 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5748 LOGD("No reprocessible stream found, ignore crop data");
5749 } else {
5750 int rc = NO_ERROR;
5751 Vector<int32_t> roi_map;
5752 int32_t *crop = new int32_t[cnt*4];
5753 if (NULL == crop) {
5754 rc = NO_MEMORY;
5755 }
5756 if (NO_ERROR == rc) {
5757 int32_t streams_found = 0;
5758 for (size_t i = 0; i < cnt; i++) {
5759 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5760 if (pprocDone) {
5761 // HAL already does internal reprocessing,
5762 // either via reprocessing before JPEG encoding,
5763 // or offline postprocessing for pproc bypass case.
5764 crop[0] = 0;
5765 crop[1] = 0;
5766 crop[2] = mInputStreamInfo.dim.width;
5767 crop[3] = mInputStreamInfo.dim.height;
5768 } else {
5769 crop[0] = crop_data->crop_info[i].crop.left;
5770 crop[1] = crop_data->crop_info[i].crop.top;
5771 crop[2] = crop_data->crop_info[i].crop.width;
5772 crop[3] = crop_data->crop_info[i].crop.height;
5773 }
5774 roi_map.add(crop_data->crop_info[i].roi_map.left);
5775 roi_map.add(crop_data->crop_info[i].roi_map.top);
5776 roi_map.add(crop_data->crop_info[i].roi_map.width);
5777 roi_map.add(crop_data->crop_info[i].roi_map.height);
5778 streams_found++;
5779 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5780 crop[0], crop[1], crop[2], crop[3]);
5781 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5782 crop_data->crop_info[i].roi_map.left,
5783 crop_data->crop_info[i].roi_map.top,
5784 crop_data->crop_info[i].roi_map.width,
5785 crop_data->crop_info[i].roi_map.height);
5786 break;
5787
5788 }
5789 }
5790 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5791 &streams_found, 1);
5792 camMetadata.update(QCAMERA3_CROP_REPROCESS,
5793 crop, (size_t)(streams_found * 4));
5794 if (roi_map.array()) {
5795 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5796 roi_map.array(), roi_map.size());
5797 }
5798 }
5799 if (crop) {
5800 delete [] crop;
5801 }
5802 }
5803 }
5804 }
5805
5806 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5807 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5808 // so hardcoding the CAC result to OFF mode.
5809 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5810 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5811 } else {
5812 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5813 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5814 *cacMode);
5815 if (NAME_NOT_FOUND != val) {
5816 uint8_t resultCacMode = (uint8_t)val;
5817 // check whether CAC result from CB is equal to Framework set CAC mode
5818 // If not equal then set the CAC mode came in corresponding request
5819 if (fwk_cacMode != resultCacMode) {
5820 resultCacMode = fwk_cacMode;
5821 }
5822 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5823 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5824 } else {
5825 LOGE("Invalid CAC camera parameter: %d", *cacMode);
5826 }
5827 }
5828 }
5829
5830 // Post blob of cam_cds_data through vendor tag.
5831 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5832 uint8_t cnt = cdsInfo->num_of_streams;
5833 cam_cds_data_t cdsDataOverride;
5834 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5835 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5836 cdsDataOverride.num_of_streams = 1;
5837 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5838 uint32_t reproc_stream_id;
5839 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5840 LOGD("No reprocessible stream found, ignore cds data");
5841 } else {
5842 for (size_t i = 0; i < cnt; i++) {
5843 if (cdsInfo->cds_info[i].stream_id ==
5844 reproc_stream_id) {
5845 cdsDataOverride.cds_info[0].cds_enable =
5846 cdsInfo->cds_info[i].cds_enable;
5847 break;
5848 }
5849 }
5850 }
5851 } else {
5852 LOGD("Invalid stream count %d in CDS_DATA", cnt);
5853 }
5854 camMetadata.update(QCAMERA3_CDS_INFO,
5855 (uint8_t *)&cdsDataOverride,
5856 sizeof(cam_cds_data_t));
5857 }
5858
5859 // Ldaf calibration data
5860 if (!mLdafCalibExist) {
5861 IF_META_AVAILABLE(uint32_t, ldafCalib,
5862 CAM_INTF_META_LDAF_EXIF, metadata) {
5863 mLdafCalibExist = true;
5864 mLdafCalib[0] = ldafCalib[0];
5865 mLdafCalib[1] = ldafCalib[1];
5866 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5867 ldafCalib[0], ldafCalib[1]);
5868 }
5869 }
5870
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005871 // Reprocess and DDM debug data through vendor tag
5872 cam_reprocess_info_t repro_info;
5873 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005874 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
5875 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005876 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005877 }
5878 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
5879 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005880 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005881 }
5882 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
5883 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005884 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005885 }
5886 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
5887 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005888 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005889 }
5890 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
5891 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005892 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005893 }
5894 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005895 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005896 }
5897 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
5898 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005899 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005900 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07005901 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
5902 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
5903 }
5904 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
5905 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
5906 }
5907 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
5908 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005909
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005910 // INSTANT AEC MODE
5911 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
5912 CAM_INTF_PARM_INSTANT_AEC, metadata) {
5913 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
5914 }
5915
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005916 /* In batch mode, cache the first metadata in the batch */
5917 if (mBatchSize && firstMetadataInBatch) {
5918 mCachedMetadata.clear();
5919 mCachedMetadata = camMetadata;
5920 }
5921
Thierry Strudel3d639192016-09-09 11:52:26 -07005922 resultMetadata = camMetadata.release();
5923 return resultMetadata;
5924}
5925
/*===========================================================================
 * FUNCTION   : saveExifParams
 *
 * DESCRIPTION: Caches 3A/stats EXIF debug blobs (AE, AWB, AF, ASD, stats,
 *              bE-stats, bhist, 3A tuning) from a metadata callback into
 *              mExifParams.debug_params so they can later be embedded into
 *              JPEG EXIF. Each blob is copied only when present in the
 *              incoming metadata, and its matching *_valid flag is set.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : none
 *
 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // mExifParams.debug_params may be NULL when debug-data storage was never
    // allocated, hence the per-section NULL check before each copy.
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
5996
/*===========================================================================
 * FUNCTION   : get3AExifParams
 *
 * DESCRIPTION: Returns a copy of the cached EXIF parameters, including the
 *              debug-data pointers previously filled in by saveExifParams,
 *              for use during JPEG encoding.
 *
 * PARAMETERS : none
 *
 *
 * RETURN     : mm_jpeg_exif_params_t
 *
 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    return mExifParams;
}
6012
/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: Translates the "urgent" (partial, low-latency) subset of the
 *              HAL metadata callback — 3A states, triggers and modes — into
 *              framework result metadata so the framework can react before
 *              the full result arrives.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *                  metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state (narrowed to the framework's uint8 enum).
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Focus mode needs a HAL-enum -> framework-enum lookup.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE is not reported directly by the backend; it is
    // deduced from three independent pieces of metadata. Defaults below mark
    // each piece as "absent" so the priority chain can tell what was reported.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Priority order matters: redeye reduction wins over flash mode, which
    // wins over the plain AE on/off mode.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6146
6147/*===========================================================================
6148 * FUNCTION : dumpMetadataToFile
6149 *
6150 * DESCRIPTION: Dumps tuning metadata to file system
6151 *
6152 * PARAMETERS :
6153 * @meta : tuning metadata
6154 * @dumpFrameCount : current dump frame count
6155 * @enabled : Enable mask
6156 *
6157 *==========================================================================*/
6158void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6159 uint32_t &dumpFrameCount,
6160 bool enabled,
6161 const char *type,
6162 uint32_t frameNumber)
6163{
6164 //Some sanity checks
6165 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6166 LOGE("Tuning sensor data size bigger than expected %d: %d",
6167 meta.tuning_sensor_data_size,
6168 TUNING_SENSOR_DATA_MAX);
6169 return;
6170 }
6171
6172 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6173 LOGE("Tuning VFE data size bigger than expected %d: %d",
6174 meta.tuning_vfe_data_size,
6175 TUNING_VFE_DATA_MAX);
6176 return;
6177 }
6178
6179 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6180 LOGE("Tuning CPP data size bigger than expected %d: %d",
6181 meta.tuning_cpp_data_size,
6182 TUNING_CPP_DATA_MAX);
6183 return;
6184 }
6185
6186 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6187 LOGE("Tuning CAC data size bigger than expected %d: %d",
6188 meta.tuning_cac_data_size,
6189 TUNING_CAC_DATA_MAX);
6190 return;
6191 }
6192 //
6193
6194 if(enabled){
6195 char timeBuf[FILENAME_MAX];
6196 char buf[FILENAME_MAX];
6197 memset(buf, 0, sizeof(buf));
6198 memset(timeBuf, 0, sizeof(timeBuf));
6199 time_t current_time;
6200 struct tm * timeinfo;
6201 time (&current_time);
6202 timeinfo = localtime (&current_time);
6203 if (timeinfo != NULL) {
6204 strftime (timeBuf, sizeof(timeBuf),
6205 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
6206 }
6207 String8 filePath(timeBuf);
6208 snprintf(buf,
6209 sizeof(buf),
6210 "%dm_%s_%d.bin",
6211 dumpFrameCount,
6212 type,
6213 frameNumber);
6214 filePath.append(buf);
6215 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6216 if (file_fd >= 0) {
6217 ssize_t written_len = 0;
6218 meta.tuning_data_version = TUNING_DATA_VERSION;
6219 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6220 written_len += write(file_fd, data, sizeof(uint32_t));
6221 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6222 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6223 written_len += write(file_fd, data, sizeof(uint32_t));
6224 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6225 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6226 written_len += write(file_fd, data, sizeof(uint32_t));
6227 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6228 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6229 written_len += write(file_fd, data, sizeof(uint32_t));
6230 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6231 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6232 written_len += write(file_fd, data, sizeof(uint32_t));
6233 meta.tuning_mod3_data_size = 0;
6234 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6235 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6236 written_len += write(file_fd, data, sizeof(uint32_t));
6237 size_t total_size = meta.tuning_sensor_data_size;
6238 data = (void *)((uint8_t *)&meta.data);
6239 written_len += write(file_fd, data, total_size);
6240 total_size = meta.tuning_vfe_data_size;
6241 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6242 written_len += write(file_fd, data, total_size);
6243 total_size = meta.tuning_cpp_data_size;
6244 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6245 written_len += write(file_fd, data, total_size);
6246 total_size = meta.tuning_cac_data_size;
6247 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6248 written_len += write(file_fd, data, total_size);
6249 close(file_fd);
6250 }else {
6251 LOGE("fail to open file for metadata dumping");
6252 }
6253 }
6254}
6255
/*===========================================================================
 * FUNCTION   : cleanAndSortStreamInfo
 *
 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
 *              and sort them such that raw stream is at the end of the list
 *              This is a workaround for camera daemon constraint.
 *
 * PARAMETERS : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::cleanAndSortStreamInfo()
{
    List<stream_info_t *> newStreamInfo;

    /*clean up invalid streams*/
    // Pass 1: destroy channels of INVALID streams and drop their entries.
    // erase() returns the next iterator, so only the else-branch advances.
    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if(((*it)->status) == INVALID){
            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
            delete channel;
            free(*it);
            it = mStreamInfo.erase(it);
        } else {
            it++;
        }
    }

    // Move preview/video/callback/snapshot streams into newList
    // Pass 2: everything that is not a RAW format goes to the front of the
    // rebuilt list, preserving relative order.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end();) {
        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
            newStreamInfo.push_back(*it);
            it = mStreamInfo.erase(it);
        } else
            it++;
    }
    // Move raw streams into newList
    // Pass 3: whatever remains is RAW and is appended last, which is the
    // ordering the camera daemon requires.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end();) {
        newStreamInfo.push_back(*it);
        it = mStreamInfo.erase(it);
    }

    mStreamInfo = newStreamInfo;
}
6303
6304/*===========================================================================
6305 * FUNCTION : extractJpegMetadata
6306 *
6307 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6308 * JPEG metadata is cached in HAL, and return as part of capture
6309 * result when metadata is returned from camera daemon.
6310 *
6311 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6312 * @request: capture request
6313 *
6314 *==========================================================================*/
6315void QCamera3HardwareInterface::extractJpegMetadata(
6316 CameraMetadata& jpegMetadata,
6317 const camera3_capture_request_t *request)
6318{
6319 CameraMetadata frame_settings;
6320 frame_settings = request->settings;
6321
6322 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6323 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6324 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6325 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6326
6327 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6328 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6329 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6330 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6331
6332 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6333 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6334 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6335 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6336
6337 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6338 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6339 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6340 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6341
6342 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6343 jpegMetadata.update(ANDROID_JPEG_QUALITY,
6344 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6345 frame_settings.find(ANDROID_JPEG_QUALITY).count);
6346
6347 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6348 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6349 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6350 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6351
6352 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6353 int32_t thumbnail_size[2];
6354 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6355 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6356 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6357 int32_t orientation =
6358 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006359 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006360 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6361 int32_t temp;
6362 temp = thumbnail_size[0];
6363 thumbnail_size[0] = thumbnail_size[1];
6364 thumbnail_size[1] = temp;
6365 }
6366 }
6367 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6368 thumbnail_size,
6369 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6370 }
6371
6372}
6373
6374/*===========================================================================
6375 * FUNCTION : convertToRegions
6376 *
6377 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6378 *
6379 * PARAMETERS :
6380 * @rect : cam_rect_t struct to convert
6381 * @region : int32_t destination array
6382 * @weight : if we are converting from cam_area_t, weight is valid
6383 * else weight = -1
6384 *
6385 *==========================================================================*/
6386void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6387 int32_t *region, int weight)
6388{
6389 region[0] = rect.left;
6390 region[1] = rect.top;
6391 region[2] = rect.left + rect.width;
6392 region[3] = rect.top + rect.height;
6393 if (weight > -1) {
6394 region[4] = weight;
6395 }
6396}
6397
6398/*===========================================================================
6399 * FUNCTION : convertFromRegions
6400 *
6401 * DESCRIPTION: helper method to convert from array to cam_rect_t
6402 *
6403 * PARAMETERS :
6404 * @rect : cam_rect_t struct to convert
6405 * @region : int32_t destination array
6406 * @weight : if we are converting from cam_area_t, weight is valid
6407 * else weight = -1
6408 *
6409 *==========================================================================*/
6410void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6411 const camera_metadata_t *settings, uint32_t tag)
6412{
6413 CameraMetadata frame_settings;
6414 frame_settings = settings;
6415 int32_t x_min = frame_settings.find(tag).data.i32[0];
6416 int32_t y_min = frame_settings.find(tag).data.i32[1];
6417 int32_t x_max = frame_settings.find(tag).data.i32[2];
6418 int32_t y_max = frame_settings.find(tag).data.i32[3];
6419 roi.weight = frame_settings.find(tag).data.i32[4];
6420 roi.rect.left = x_min;
6421 roi.rect.top = y_min;
6422 roi.rect.width = x_max - x_min;
6423 roi.rect.height = y_max - y_min;
6424}
6425
6426/*===========================================================================
6427 * FUNCTION : resetIfNeededROI
6428 *
6429 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6430 * crop region
6431 *
6432 * PARAMETERS :
6433 * @roi : cam_area_t struct to resize
6434 * @scalerCropRegion : cam_crop_region_t region to compare against
6435 *
6436 *
6437 *==========================================================================*/
6438bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6439 const cam_crop_region_t* scalerCropRegion)
6440{
6441 int32_t roi_x_max = roi->rect.width + roi->rect.left;
6442 int32_t roi_y_max = roi->rect.height + roi->rect.top;
6443 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6444 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6445
6446 /* According to spec weight = 0 is used to indicate roi needs to be disabled
6447 * without having this check the calculations below to validate if the roi
6448 * is inside scalar crop region will fail resulting in the roi not being
6449 * reset causing algorithm to continue to use stale roi window
6450 */
6451 if (roi->weight == 0) {
6452 return true;
6453 }
6454
6455 if ((roi_x_max < scalerCropRegion->left) ||
6456 // right edge of roi window is left of scalar crop's left edge
6457 (roi_y_max < scalerCropRegion->top) ||
6458 // bottom edge of roi window is above scalar crop's top edge
6459 (roi->rect.left > crop_x_max) ||
6460 // left edge of roi window is beyond(right) of scalar crop's right edge
6461 (roi->rect.top > crop_y_max)){
6462 // top edge of roi windo is above scalar crop's top edge
6463 return false;
6464 }
6465 if (roi->rect.left < scalerCropRegion->left) {
6466 roi->rect.left = scalerCropRegion->left;
6467 }
6468 if (roi->rect.top < scalerCropRegion->top) {
6469 roi->rect.top = scalerCropRegion->top;
6470 }
6471 if (roi_x_max > crop_x_max) {
6472 roi_x_max = crop_x_max;
6473 }
6474 if (roi_y_max > crop_y_max) {
6475 roi_y_max = crop_y_max;
6476 }
6477 roi->rect.width = roi_x_max - roi->rect.left;
6478 roi->rect.height = roi_y_max - roi->rect.top;
6479 return true;
6480}
6481
6482/*===========================================================================
6483 * FUNCTION : convertLandmarks
6484 *
6485 * DESCRIPTION: helper method to extract the landmarks from face detection info
6486 *
6487 * PARAMETERS :
6488 * @landmark_data : input landmark data to be converted
6489 * @landmarks : int32_t destination array
6490 *
6491 *
6492 *==========================================================================*/
6493void QCamera3HardwareInterface::convertLandmarks(
6494 cam_face_landmarks_info_t landmark_data,
6495 int32_t *landmarks)
6496{
Thierry Strudel04e026f2016-10-10 11:27:36 -07006497 if (landmark_data.is_left_eye_valid) {
6498 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
6499 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
6500 } else {
6501 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
6502 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
6503 }
6504
6505 if (landmark_data.is_right_eye_valid) {
6506 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
6507 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
6508 } else {
6509 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
6510 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
6511 }
6512
6513 if (landmark_data.is_mouth_valid) {
6514 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
6515 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
6516 } else {
6517 landmarks[MOUTH_X] = FACE_INVALID_POINT;
6518 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
6519 }
6520}
6521
6522/*===========================================================================
6523 * FUNCTION : setInvalidLandmarks
6524 *
6525 * DESCRIPTION: helper method to set invalid landmarks
6526 *
6527 * PARAMETERS :
6528 * @landmarks : int32_t destination array
6529 *
6530 *
6531 *==========================================================================*/
6532void QCamera3HardwareInterface::setInvalidLandmarks(
6533 int32_t *landmarks)
6534{
6535 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
6536 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
6537 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
6538 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
6539 landmarks[MOUTH_X] = FACE_INVALID_POINT;
6540 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07006541}
6542
6543#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006544
/*===========================================================================
 * FUNCTION   : getCapabilities
 *
 * DESCRIPTION: query camera capability from back-end. Allocates a shared
 *              heap buffer, maps it to the back-end, triggers the query,
 *              then copies the result into a malloc'd cam_capability_t that
 *              the caller owns (caller must free()).
 *
 * PARAMETERS :
 *   @ops        : mm-interface ops structure
 *   @cam_handle : camera handle for which we need capability
 *
 * RETURN     : ptr type of capability structure
 *                  capability for success
 *                  NULL for failure
 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability */
    // The back-end fills the mapped heap buffer in place.
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    // Copy the queried data out of the shared heap into a heap allocation
    // owned by the caller, so the mapped buffer can be released below.
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    // Analysis padding offsets are not meaningful here; zero them out.
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

// Cleanup ladder: the success path also falls through here — the shared heap
// is always unmapped/deallocated since the result was copied into cap_ptr.
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
6633
Thierry Strudel3d639192016-09-09 11:52:26 -07006634/*===========================================================================
6635 * FUNCTION : initCapabilities
6636 *
6637 * DESCRIPTION: initialize camera capabilities in static data struct
6638 *
6639 * PARAMETERS :
6640 * @cameraId : camera Id
6641 *
6642 * RETURN : int32_t type of status
6643 * NO_ERROR -- success
6644 * none-zero failure code
6645 *==========================================================================*/
6646int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6647{
6648 int rc = 0;
6649 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006650 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07006651
6652 rc = camera_open((uint8_t)cameraId, &cameraHandle);
6653 if (rc) {
6654 LOGE("camera_open failed. rc = %d", rc);
6655 goto open_failed;
6656 }
6657 if (!cameraHandle) {
6658 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6659 goto open_failed;
6660 }
6661
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006662 handle = get_main_camera_handle(cameraHandle->camera_handle);
6663 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
6664 if (gCamCapability[cameraId] == NULL) {
6665 rc = FAILED_TRANSACTION;
6666 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07006667 }
6668
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006669 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006670 if (is_dual_camera_by_idx(cameraId)) {
6671 handle = get_aux_camera_handle(cameraHandle->camera_handle);
6672 gCamCapability[cameraId]->aux_cam_cap =
6673 getCapabilities(cameraHandle->ops, handle);
6674 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
6675 rc = FAILED_TRANSACTION;
6676 free(gCamCapability[cameraId]);
6677 goto failed_op;
6678 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006679 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006680failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07006681 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6682 cameraHandle = NULL;
6683open_failed:
6684 return rc;
6685}
6686
6687/*==========================================================================
6688 * FUNCTION   : get3AVersion
6689 *
6690 * DESCRIPTION: get the Q3A S/W version
6691 *
6692 * PARAMETERS :
6693 * @sw_version: Reference of Q3A structure which will hold version info upon
6694 * return
6695 *
6696 * RETURN : None
6697 *
6698 *==========================================================================*/
6699void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6700{
6701 if(gCamCapability[mCameraId])
6702 sw_version = gCamCapability[mCameraId]->q3a_version;
6703 else
6704 LOGE("Capability structure NULL!");
6705}
6706
6707
6708/*===========================================================================
6709 * FUNCTION : initParameters
6710 *
6711 * DESCRIPTION: initialize camera parameters
6712 *
6713 * PARAMETERS :
6714 *
6715 * RETURN : int32_t type of status
6716 * NO_ERROR -- success
6717 * none-zero failure code
6718 *==========================================================================*/
6719int QCamera3HardwareInterface::initParameters()
6720{
6721 int rc = 0;
6722
6723 //Allocate Set Param Buffer
6724 mParamHeap = new QCamera3HeapMemory(1);
6725 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6726 if(rc != OK) {
6727 rc = NO_MEMORY;
6728 LOGE("Failed to allocate SETPARM Heap memory");
6729 delete mParamHeap;
6730 mParamHeap = NULL;
6731 return rc;
6732 }
6733
6734 //Map memory for parameters buffer
6735 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6736 CAM_MAPPING_BUF_TYPE_PARM_BUF,
6737 mParamHeap->getFd(0),
6738 sizeof(metadata_buffer_t),
6739 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6740 if(rc < 0) {
6741 LOGE("failed to map SETPARM buffer");
6742 rc = FAILED_TRANSACTION;
6743 mParamHeap->deallocate();
6744 delete mParamHeap;
6745 mParamHeap = NULL;
6746 return rc;
6747 }
6748
6749 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6750
6751 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6752 return rc;
6753}
6754
6755/*===========================================================================
6756 * FUNCTION : deinitParameters
6757 *
6758 * DESCRIPTION: de-initialize camera parameters
6759 *
6760 * PARAMETERS :
6761 *
6762 * RETURN : NONE
6763 *==========================================================================*/
6764void QCamera3HardwareInterface::deinitParameters()
6765{
6766 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6767 CAM_MAPPING_BUF_TYPE_PARM_BUF);
6768
6769 mParamHeap->deallocate();
6770 delete mParamHeap;
6771 mParamHeap = NULL;
6772
6773 mParameters = NULL;
6774
6775 free(mPrevParameters);
6776 mPrevParameters = NULL;
6777}
6778
6779/*===========================================================================
6780 * FUNCTION : calcMaxJpegSize
6781 *
6782 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6783 *
6784 * PARAMETERS :
6785 *
6786 * RETURN : max_jpeg_size
6787 *==========================================================================*/
6788size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6789{
6790 size_t max_jpeg_size = 0;
6791 size_t temp_width, temp_height;
6792 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6793 MAX_SIZES_CNT);
6794 for (size_t i = 0; i < count; i++) {
6795 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6796 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6797 if (temp_width * temp_height > max_jpeg_size ) {
6798 max_jpeg_size = temp_width * temp_height;
6799 }
6800 }
6801 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6802 return max_jpeg_size;
6803}
6804
6805/*===========================================================================
6806 * FUNCTION : getMaxRawSize
6807 *
6808 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6809 *
6810 * PARAMETERS :
6811 *
6812 * RETURN : Largest supported Raw Dimension
6813 *==========================================================================*/
6814cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6815{
6816 int max_width = 0;
6817 cam_dimension_t maxRawSize;
6818
6819 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6820 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6821 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6822 max_width = gCamCapability[camera_id]->raw_dim[i].width;
6823 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6824 }
6825 }
6826 return maxRawSize;
6827}
6828
6829
6830/*===========================================================================
6831 * FUNCTION : calcMaxJpegDim
6832 *
6833 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6834 *
6835 * PARAMETERS :
6836 *
6837 * RETURN : max_jpeg_dim
6838 *==========================================================================*/
6839cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6840{
6841 cam_dimension_t max_jpeg_dim;
6842 cam_dimension_t curr_jpeg_dim;
6843 max_jpeg_dim.width = 0;
6844 max_jpeg_dim.height = 0;
6845 curr_jpeg_dim.width = 0;
6846 curr_jpeg_dim.height = 0;
6847 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6848 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6849 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6850 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6851 max_jpeg_dim.width * max_jpeg_dim.height ) {
6852 max_jpeg_dim.width = curr_jpeg_dim.width;
6853 max_jpeg_dim.height = curr_jpeg_dim.height;
6854 }
6855 }
6856 return max_jpeg_dim;
6857}
6858
6859/*===========================================================================
6860 * FUNCTION : addStreamConfig
6861 *
6862 * DESCRIPTION: adds the stream configuration to the array
6863 *
6864 * PARAMETERS :
6865 * @available_stream_configs : pointer to stream configuration array
6866 * @scalar_format : scalar format
6867 * @dim : configuration dimension
6868 * @config_type : input or output configuration type
6869 *
6870 * RETURN : NONE
6871 *==========================================================================*/
6872void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6873 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6874{
6875 available_stream_configs.add(scalar_format);
6876 available_stream_configs.add(dim.width);
6877 available_stream_configs.add(dim.height);
6878 available_stream_configs.add(config_type);
6879}
6880
6881/*===========================================================================
6882 * FUNCTION   : supportBurstCapture
6883 *
6884 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6885 *
6886 * PARAMETERS :
6887 * @cameraId : camera Id
6888 *
6889 * RETURN : true if camera supports BURST_CAPTURE
6890 * false otherwise
6891 *==========================================================================*/
6892bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6893{
6894 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6895 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6896 const int32_t highResWidth = 3264;
6897 const int32_t highResHeight = 2448;
6898
6899 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6900 // Maximum resolution images cannot be captured at >= 10fps
6901 // -> not supporting BURST_CAPTURE
6902 return false;
6903 }
6904
6905 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6906 // Maximum resolution images can be captured at >= 20fps
6907 // --> supporting BURST_CAPTURE
6908 return true;
6909 }
6910
6911 // Find the smallest highRes resolution, or largest resolution if there is none
6912 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6913 MAX_SIZES_CNT);
6914 size_t highRes = 0;
6915 while ((highRes + 1 < totalCnt) &&
6916 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6917 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6918 highResWidth * highResHeight)) {
6919 highRes++;
6920 }
6921 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6922 return true;
6923 } else {
6924 return false;
6925 }
6926}
6927
6928/*===========================================================================
6929 * FUNCTION : initStaticMetadata
6930 *
6931 * DESCRIPTION: initialize the static metadata
6932 *
6933 * PARAMETERS :
6934 * @cameraId : camera Id
6935 *
6936 * RETURN : int32_t type of status
6937 * 0 -- success
6938 * non-zero failure code
6939 *==========================================================================*/
6940int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6941{
6942 int rc = 0;
6943 CameraMetadata staticInfo;
6944 size_t count = 0;
6945 bool limitedDevice = false;
6946 char prop[PROPERTY_VALUE_MAX];
6947 bool supportBurst = false;
6948
6949 supportBurst = supportBurstCapture(cameraId);
6950
6951 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6952 * guaranteed or if min fps of max resolution is less than 20 fps, its
6953 * advertised as limited device*/
6954 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6955 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6956 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6957 !supportBurst;
6958
6959 uint8_t supportedHwLvl = limitedDevice ?
6960 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006961#ifndef USE_HAL_3_3
6962 // LEVEL_3 - This device will support level 3.
6963 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6964#else
Thierry Strudel3d639192016-09-09 11:52:26 -07006965 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006966#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006967
6968 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6969 &supportedHwLvl, 1);
6970
6971 bool facingBack = false;
6972 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
6973 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
6974 facingBack = true;
6975 }
6976 /*HAL 3 only*/
6977 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6978 &gCamCapability[cameraId]->min_focus_distance, 1);
6979
6980 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6981 &gCamCapability[cameraId]->hyper_focal_distance, 1);
6982
6983 /*should be using focal lengths but sensor doesn't provide that info now*/
6984 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6985 &gCamCapability[cameraId]->focal_length,
6986 1);
6987
6988 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6989 gCamCapability[cameraId]->apertures,
6990 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6991
6992 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6993 gCamCapability[cameraId]->filter_densities,
6994 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6995
6996
6997 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6998 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6999 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7000
7001 int32_t lens_shading_map_size[] = {
7002 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7003 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7004 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7005 lens_shading_map_size,
7006 sizeof(lens_shading_map_size)/sizeof(int32_t));
7007
7008 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7009 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7010
7011 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7012 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7013
7014 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7015 &gCamCapability[cameraId]->max_frame_duration, 1);
7016
7017 camera_metadata_rational baseGainFactor = {
7018 gCamCapability[cameraId]->base_gain_factor.numerator,
7019 gCamCapability[cameraId]->base_gain_factor.denominator};
7020 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7021 &baseGainFactor, 1);
7022
7023 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7024 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7025
7026 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7027 gCamCapability[cameraId]->pixel_array_size.height};
7028 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7029 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7030
7031 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7032 gCamCapability[cameraId]->active_array_size.top,
7033 gCamCapability[cameraId]->active_array_size.width,
7034 gCamCapability[cameraId]->active_array_size.height};
7035 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7036 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7037
7038 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7039 &gCamCapability[cameraId]->white_level, 1);
7040
7041 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7042 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
7043
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007044#ifndef USE_HAL_3_3
7045 bool hasBlackRegions = false;
7046 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7047 LOGW("black_region_count: %d is bounded to %d",
7048 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7049 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7050 }
7051 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7052 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7053 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7054 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7055 }
7056 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7057 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7058 hasBlackRegions = true;
7059 }
7060#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007061 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7062 &gCamCapability[cameraId]->flash_charge_duration, 1);
7063
7064 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7065 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7066
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007067 // SOF timestamp is based on monotonic_boottime. So advertize REALTIME timesource
7068 // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
7069 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7070 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007071 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7072 &timestampSource, 1);
7073
7074 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7075 &gCamCapability[cameraId]->histogram_size, 1);
7076
7077 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7078 &gCamCapability[cameraId]->max_histogram_count, 1);
7079
7080 int32_t sharpness_map_size[] = {
7081 gCamCapability[cameraId]->sharpness_map_size.width,
7082 gCamCapability[cameraId]->sharpness_map_size.height};
7083
7084 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7085 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7086
7087 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7088 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7089
7090 int32_t scalar_formats[] = {
7091 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7092 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7093 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7094 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7095 HAL_PIXEL_FORMAT_RAW10,
7096 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7097 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7098 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7099 scalar_formats,
7100 scalar_formats_count);
7101
7102 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7103 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7104 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7105 count, MAX_SIZES_CNT, available_processed_sizes);
7106 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7107 available_processed_sizes, count * 2);
7108
7109 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7110 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7111 makeTable(gCamCapability[cameraId]->raw_dim,
7112 count, MAX_SIZES_CNT, available_raw_sizes);
7113 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7114 available_raw_sizes, count * 2);
7115
7116 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7117 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7118 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7119 count, MAX_SIZES_CNT, available_fps_ranges);
7120 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7121 available_fps_ranges, count * 2);
7122
7123 camera_metadata_rational exposureCompensationStep = {
7124 gCamCapability[cameraId]->exp_compensation_step.numerator,
7125 gCamCapability[cameraId]->exp_compensation_step.denominator};
7126 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7127 &exposureCompensationStep, 1);
7128
7129 Vector<uint8_t> availableVstabModes;
7130 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7131 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07007133 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07007135 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007136 count = IS_TYPE_MAX;
7137 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
7138 for (size_t i = 0; i < count; i++) {
7139 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
7140 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
7141 eisSupported = true;
7142 break;
7143 }
7144 }
7145 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007146 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7147 }
7148 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7149 availableVstabModes.array(), availableVstabModes.size());
7150
7151 /*HAL 1 and HAL 3 common*/
7152 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
7153 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
7154 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
7155 float maxZoom = maxZoomStep/minZoomStep;
7156 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7157 &maxZoom, 1);
7158
7159 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7160 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7161
7162 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7163 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7164 max3aRegions[2] = 0; /* AF not supported */
7165 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7166 max3aRegions, 3);
7167
7168 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7169 memset(prop, 0, sizeof(prop));
7170 property_get("persist.camera.facedetect", prop, "1");
7171 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7172 LOGD("Support face detection mode: %d",
7173 supportedFaceDetectMode);
7174
7175 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07007176 /* support mode should be OFF if max number of face is 0 */
7177 if (maxFaces <= 0) {
7178 supportedFaceDetectMode = 0;
7179 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007180 Vector<uint8_t> availableFaceDetectModes;
7181 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7182 if (supportedFaceDetectMode == 1) {
7183 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7184 } else if (supportedFaceDetectMode == 2) {
7185 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7186 } else if (supportedFaceDetectMode == 3) {
7187 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7188 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7189 } else {
7190 maxFaces = 0;
7191 }
7192 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7193 availableFaceDetectModes.array(),
7194 availableFaceDetectModes.size());
7195 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7196 (int32_t *)&maxFaces, 1);
7197
7198 int32_t exposureCompensationRange[] = {
7199 gCamCapability[cameraId]->exposure_compensation_min,
7200 gCamCapability[cameraId]->exposure_compensation_max};
7201 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7202 exposureCompensationRange,
7203 sizeof(exposureCompensationRange)/sizeof(int32_t));
7204
7205 uint8_t lensFacing = (facingBack) ?
7206 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7207 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7208
7209 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7210 available_thumbnail_sizes,
7211 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7212
7213 /*all sizes will be clubbed into this tag*/
7214 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7215 /*android.scaler.availableStreamConfigurations*/
7216 Vector<int32_t> available_stream_configs;
7217 cam_dimension_t active_array_dim;
7218 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
7219 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
7220 /* Add input/output stream configurations for each scalar formats*/
7221 for (size_t j = 0; j < scalar_formats_count; j++) {
7222 switch (scalar_formats[j]) {
7223 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7224 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7225 case HAL_PIXEL_FORMAT_RAW10:
7226 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7227 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7228 addStreamConfig(available_stream_configs, scalar_formats[j],
7229 gCamCapability[cameraId]->raw_dim[i],
7230 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7231 }
7232 break;
7233 case HAL_PIXEL_FORMAT_BLOB:
7234 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7235 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7236 addStreamConfig(available_stream_configs, scalar_formats[j],
7237 gCamCapability[cameraId]->picture_sizes_tbl[i],
7238 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7239 }
7240 break;
7241 case HAL_PIXEL_FORMAT_YCbCr_420_888:
7242 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7243 default:
7244 cam_dimension_t largest_picture_size;
7245 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7246 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7247 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7248 addStreamConfig(available_stream_configs, scalar_formats[j],
7249 gCamCapability[cameraId]->picture_sizes_tbl[i],
7250 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7251 /* Book keep largest */
7252 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7253 >= largest_picture_size.width &&
7254 gCamCapability[cameraId]->picture_sizes_tbl[i].height
7255 >= largest_picture_size.height)
7256 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7257 }
7258 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
7259 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7260 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7261 addStreamConfig(available_stream_configs, scalar_formats[j],
7262 largest_picture_size,
7263 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7264 }
7265 break;
7266 }
7267 }
7268
7269 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7270 available_stream_configs.array(), available_stream_configs.size());
7271 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7272 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7273
7274 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7275 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7276
7277 /* android.scaler.availableMinFrameDurations */
7278 Vector<int64_t> available_min_durations;
7279 for (size_t j = 0; j < scalar_formats_count; j++) {
7280 switch (scalar_formats[j]) {
7281 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7282 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7283 case HAL_PIXEL_FORMAT_RAW10:
7284 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7285 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7286 available_min_durations.add(scalar_formats[j]);
7287 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7288 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7289 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7290 }
7291 break;
7292 default:
7293 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7294 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7295 available_min_durations.add(scalar_formats[j]);
7296 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7297 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7298 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7299 }
7300 break;
7301 }
7302 }
7303 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7304 available_min_durations.array(), available_min_durations.size());
7305
7306 Vector<int32_t> available_hfr_configs;
7307 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7308 int32_t fps = 0;
7309 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7310 case CAM_HFR_MODE_60FPS:
7311 fps = 60;
7312 break;
7313 case CAM_HFR_MODE_90FPS:
7314 fps = 90;
7315 break;
7316 case CAM_HFR_MODE_120FPS:
7317 fps = 120;
7318 break;
7319 case CAM_HFR_MODE_150FPS:
7320 fps = 150;
7321 break;
7322 case CAM_HFR_MODE_180FPS:
7323 fps = 180;
7324 break;
7325 case CAM_HFR_MODE_210FPS:
7326 fps = 210;
7327 break;
7328 case CAM_HFR_MODE_240FPS:
7329 fps = 240;
7330 break;
7331 case CAM_HFR_MODE_480FPS:
7332 fps = 480;
7333 break;
7334 case CAM_HFR_MODE_OFF:
7335 case CAM_HFR_MODE_MAX:
7336 default:
7337 break;
7338 }
7339
7340 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7341 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7342 /* For each HFR frame rate, need to advertise one variable fps range
7343 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7344 * and [120, 120]. While camcorder preview alone is running [30, 120] is
7345 * set by the app. When video recording is started, [120, 120] is
7346 * set. This way sensor configuration does not change when recording
7347 * is started */
7348
7349 /* (width, height, fps_min, fps_max, batch_size_max) */
7350 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7351 j < MAX_SIZES_CNT; j++) {
7352 available_hfr_configs.add(
7353 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7354 available_hfr_configs.add(
7355 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7356 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7357 available_hfr_configs.add(fps);
7358 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7359
7360 /* (width, height, fps_min, fps_max, batch_size_max) */
7361 available_hfr_configs.add(
7362 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7363 available_hfr_configs.add(
7364 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7365 available_hfr_configs.add(fps);
7366 available_hfr_configs.add(fps);
7367 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7368 }
7369 }
7370 }
7371 //Advertise HFR capability only if the property is set
7372 memset(prop, 0, sizeof(prop));
7373 property_get("persist.camera.hal3hfr.enable", prop, "1");
7374 uint8_t hfrEnable = (uint8_t)atoi(prop);
7375
7376 if(hfrEnable && available_hfr_configs.array()) {
7377 staticInfo.update(
7378 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
7379 available_hfr_configs.array(), available_hfr_configs.size());
7380 }
7381
7382 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
7383 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
7384 &max_jpeg_size, 1);
7385
7386 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
7387 size_t size = 0;
7388 count = CAM_EFFECT_MODE_MAX;
7389 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
7390 for (size_t i = 0; i < count; i++) {
7391 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7392 gCamCapability[cameraId]->supported_effects[i]);
7393 if (NAME_NOT_FOUND != val) {
7394 avail_effects[size] = (uint8_t)val;
7395 size++;
7396 }
7397 }
7398 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
7399 avail_effects,
7400 size);
7401
7402 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
7403 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
7404 size_t supported_scene_modes_cnt = 0;
7405 count = CAM_SCENE_MODE_MAX;
7406 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
7407 for (size_t i = 0; i < count; i++) {
7408 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
7409 CAM_SCENE_MODE_OFF) {
7410 int val = lookupFwkName(SCENE_MODES_MAP,
7411 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7412 gCamCapability[cameraId]->supported_scene_modes[i]);
7413 if (NAME_NOT_FOUND != val) {
7414 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
7415 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
7416 supported_scene_modes_cnt++;
7417 }
7418 }
7419 }
7420 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7421 avail_scene_modes,
7422 supported_scene_modes_cnt);
7423
7424 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
7425 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
7426 supported_scene_modes_cnt,
7427 CAM_SCENE_MODE_MAX,
7428 scene_mode_overrides,
7429 supported_indexes,
7430 cameraId);
7431
7432 if (supported_scene_modes_cnt == 0) {
7433 supported_scene_modes_cnt = 1;
7434 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
7435 }
7436
7437 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
7438 scene_mode_overrides, supported_scene_modes_cnt * 3);
7439
7440 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
7441 ANDROID_CONTROL_MODE_AUTO,
7442 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
7443 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
7444 available_control_modes,
7445 3);
7446
7447 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
7448 size = 0;
7449 count = CAM_ANTIBANDING_MODE_MAX;
7450 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
7451 for (size_t i = 0; i < count; i++) {
7452 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7453 gCamCapability[cameraId]->supported_antibandings[i]);
7454 if (NAME_NOT_FOUND != val) {
7455 avail_antibanding_modes[size] = (uint8_t)val;
7456 size++;
7457 }
7458
7459 }
7460 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7461 avail_antibanding_modes,
7462 size);
7463
7464 uint8_t avail_abberation_modes[] = {
7465 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
7466 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
7467 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
7468 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
7469 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
7470 if (0 == count) {
7471 // If no aberration correction modes are available for a device, this advertise OFF mode
7472 size = 1;
7473 } else {
7474 // If count is not zero then atleast one among the FAST or HIGH quality is supported
7475 // So, advertize all 3 modes if atleast any one mode is supported as per the
7476 // new M requirement
7477 size = 3;
7478 }
7479 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7480 avail_abberation_modes,
7481 size);
7482
7483 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
7484 size = 0;
7485 count = CAM_FOCUS_MODE_MAX;
7486 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
7487 for (size_t i = 0; i < count; i++) {
7488 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7489 gCamCapability[cameraId]->supported_focus_modes[i]);
7490 if (NAME_NOT_FOUND != val) {
7491 avail_af_modes[size] = (uint8_t)val;
7492 size++;
7493 }
7494 }
7495 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
7496 avail_af_modes,
7497 size);
7498
7499 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
7500 size = 0;
7501 count = CAM_WB_MODE_MAX;
7502 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
7503 for (size_t i = 0; i < count; i++) {
7504 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7505 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7506 gCamCapability[cameraId]->supported_white_balances[i]);
7507 if (NAME_NOT_FOUND != val) {
7508 avail_awb_modes[size] = (uint8_t)val;
7509 size++;
7510 }
7511 }
7512 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
7513 avail_awb_modes,
7514 size);
7515
7516 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7517 count = CAM_FLASH_FIRING_LEVEL_MAX;
7518 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7519 count);
7520 for (size_t i = 0; i < count; i++) {
7521 available_flash_levels[i] =
7522 gCamCapability[cameraId]->supported_firing_levels[i];
7523 }
7524 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7525 available_flash_levels, count);
7526
7527 uint8_t flashAvailable;
7528 if (gCamCapability[cameraId]->flash_available)
7529 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7530 else
7531 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7532 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7533 &flashAvailable, 1);
7534
7535 Vector<uint8_t> avail_ae_modes;
7536 count = CAM_AE_MODE_MAX;
7537 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
7538 for (size_t i = 0; i < count; i++) {
7539 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
7540 }
7541 if (flashAvailable) {
7542 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
7543 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
7544 }
7545 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
7546 avail_ae_modes.array(),
7547 avail_ae_modes.size());
7548
7549 int32_t sensitivity_range[2];
7550 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
7551 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
7552 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
7553 sensitivity_range,
7554 sizeof(sensitivity_range) / sizeof(int32_t));
7555
7556 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7557 &gCamCapability[cameraId]->max_analog_sensitivity,
7558 1);
7559
7560 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
7561 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
7562 &sensor_orientation,
7563 1);
7564
7565 int32_t max_output_streams[] = {
7566 MAX_STALLING_STREAMS,
7567 MAX_PROCESSED_STREAMS,
7568 MAX_RAW_STREAMS};
7569 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
7570 max_output_streams,
7571 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
7572
7573 uint8_t avail_leds = 0;
7574 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
7575 &avail_leds, 0);
7576
7577 uint8_t focus_dist_calibrated;
7578 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
7579 gCamCapability[cameraId]->focus_dist_calibrated);
7580 if (NAME_NOT_FOUND != val) {
7581 focus_dist_calibrated = (uint8_t)val;
7582 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7583 &focus_dist_calibrated, 1);
7584 }
7585
7586 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7587 size = 0;
7588 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7589 MAX_TEST_PATTERN_CNT);
7590 for (size_t i = 0; i < count; i++) {
7591 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7592 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7593 if (NAME_NOT_FOUND != testpatternMode) {
7594 avail_testpattern_modes[size] = testpatternMode;
7595 size++;
7596 }
7597 }
7598 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7599 avail_testpattern_modes,
7600 size);
7601
7602 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
7603 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
7604 &max_pipeline_depth,
7605 1);
7606
7607 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
7608 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7609 &partial_result_count,
7610 1);
7611
7612 int32_t max_stall_duration = MAX_REPROCESS_STALL;
7613 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7614
7615 Vector<uint8_t> available_capabilities;
7616 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
7617 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
7618 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
7619 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
7620 if (supportBurst) {
7621 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
7622 }
7623 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
7624 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
7625 if (hfrEnable && available_hfr_configs.array()) {
7626 available_capabilities.add(
7627 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
7628 }
7629
7630 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
7631 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
7632 }
7633 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7634 available_capabilities.array(),
7635 available_capabilities.size());
7636
7637 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
7638 //Assumption is that all bayer cameras support MANUAL_SENSOR.
7639 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7640 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7641
7642 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7643 &aeLockAvailable, 1);
7644
7645 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
7646 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7647 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7648 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7649
7650 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7651 &awbLockAvailable, 1);
7652
7653 int32_t max_input_streams = 1;
7654 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7655 &max_input_streams,
7656 1);
7657
7658 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
7659 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
7660 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
7661 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
7662 HAL_PIXEL_FORMAT_YCbCr_420_888};
7663 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7664 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
7665
7666 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
7667 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
7668 &max_latency,
7669 1);
7670
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007671#ifndef USE_HAL_3_3
7672 int32_t isp_sensitivity_range[2];
7673 isp_sensitivity_range[0] =
7674 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
7675 isp_sensitivity_range[1] =
7676 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
7677 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7678 isp_sensitivity_range,
7679 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7680#endif
7681
Thierry Strudel3d639192016-09-09 11:52:26 -07007682 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7683 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7684 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7685 available_hot_pixel_modes,
7686 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7687
7688 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7689 ANDROID_SHADING_MODE_FAST,
7690 ANDROID_SHADING_MODE_HIGH_QUALITY};
7691 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7692 available_shading_modes,
7693 3);
7694
7695 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7696 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7697 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7698 available_lens_shading_map_modes,
7699 2);
7700
7701 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
7702 ANDROID_EDGE_MODE_FAST,
7703 ANDROID_EDGE_MODE_HIGH_QUALITY,
7704 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
7705 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7706 available_edge_modes,
7707 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
7708
7709 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
7710 ANDROID_NOISE_REDUCTION_MODE_FAST,
7711 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
7712 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
7713 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
7714 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7715 available_noise_red_modes,
7716 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
7717
7718 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
7719 ANDROID_TONEMAP_MODE_FAST,
7720 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7721 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7722 available_tonemap_modes,
7723 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7724
7725 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7726 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7727 available_hot_pixel_map_modes,
7728 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7729
7730 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7731 gCamCapability[cameraId]->reference_illuminant1);
7732 if (NAME_NOT_FOUND != val) {
7733 uint8_t fwkReferenceIlluminant = (uint8_t)val;
7734 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7735 }
7736
7737 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7738 gCamCapability[cameraId]->reference_illuminant2);
7739 if (NAME_NOT_FOUND != val) {
7740 uint8_t fwkReferenceIlluminant = (uint8_t)val;
7741 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7742 }
7743
7744 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7745 (void *)gCamCapability[cameraId]->forward_matrix1,
7746 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7747
7748 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7749 (void *)gCamCapability[cameraId]->forward_matrix2,
7750 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7751
7752 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7753 (void *)gCamCapability[cameraId]->color_transform1,
7754 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7755
7756 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7757 (void *)gCamCapability[cameraId]->color_transform2,
7758 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7759
7760 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7761 (void *)gCamCapability[cameraId]->calibration_transform1,
7762 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7763
7764 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7765 (void *)gCamCapability[cameraId]->calibration_transform2,
7766 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7767
7768 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7769 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7770 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7771 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7772 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7773 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7774 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7775 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7776 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7777 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7778 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7779 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7780 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7781 ANDROID_JPEG_GPS_COORDINATES,
7782 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7783 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7784 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7785 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7786 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7787 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7788 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7789 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7790 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7791 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007792#ifndef USE_HAL_3_3
7793 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7794#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007795 ANDROID_STATISTICS_FACE_DETECT_MODE,
7796 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7797 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7798 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7799 ANDROID_BLACK_LEVEL_LOCK };
7800
7801 size_t request_keys_cnt =
7802 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7803 Vector<int32_t> available_request_keys;
7804 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7805 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7806 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7807 }
7808
7809 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7810 available_request_keys.array(), available_request_keys.size());
7811
7812 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7813 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7814 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7815 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7816 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7817 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7818 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7819 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7820 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7821 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7822 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7823 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7824 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7825 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7826 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7827 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7828 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7829 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7830 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7831 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7832 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007833 ANDROID_STATISTICS_FACE_SCORES,
7834#ifndef USE_HAL_3_3
7835 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7836#endif
7837 };
7838
Thierry Strudel3d639192016-09-09 11:52:26 -07007839 size_t result_keys_cnt =
7840 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7841
7842 Vector<int32_t> available_result_keys;
7843 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7844 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7845 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7846 }
7847 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7848 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7849 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7850 }
7851 if (supportedFaceDetectMode == 1) {
7852 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7853 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7854 } else if ((supportedFaceDetectMode == 2) ||
7855 (supportedFaceDetectMode == 3)) {
7856 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7857 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7858 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007859#ifndef USE_HAL_3_3
7860 if (hasBlackRegions) {
7861 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
7862 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
7863 }
7864#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007865 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7866 available_result_keys.array(), available_result_keys.size());
7867
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007868 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07007869 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7870 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7871 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7872 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7873 ANDROID_SCALER_CROPPING_TYPE,
7874 ANDROID_SYNC_MAX_LATENCY,
7875 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7876 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7877 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7878 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7879 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7880 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7881 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7882 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7883 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7884 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7885 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7886 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7887 ANDROID_LENS_FACING,
7888 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7889 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7890 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7891 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7892 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7893 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7894 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7895 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7896 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7897 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7898 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7899 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7900 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7901 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7902 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7903 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7904 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7905 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7906 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7907 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7908 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7909 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7910 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7911 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7912 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7913 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7914 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7915 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7916 ANDROID_TONEMAP_MAX_CURVE_POINTS,
7917 ANDROID_CONTROL_AVAILABLE_MODES,
7918 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7919 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7920 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7921 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007922 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7923#ifndef USE_HAL_3_3
7924 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
7925 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7926#endif
7927 };
7928
7929 Vector<int32_t> available_characteristics_keys;
7930 available_characteristics_keys.appendArray(characteristics_keys_basic,
7931 sizeof(characteristics_keys_basic)/sizeof(int32_t));
7932#ifndef USE_HAL_3_3
7933 if (hasBlackRegions) {
7934 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7935 }
7936#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007937 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007938 available_characteristics_keys.array(),
7939 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07007940
7941 /*available stall durations depend on the hw + sw and will be different for different devices */
7942 /*have to add for raw after implementation*/
7943 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7944 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7945
7946 Vector<int64_t> available_stall_durations;
7947 for (uint32_t j = 0; j < stall_formats_count; j++) {
7948 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7949 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7950 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7951 available_stall_durations.add(stall_formats[j]);
7952 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7953 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7954 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7955 }
7956 } else {
7957 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7958 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7959 available_stall_durations.add(stall_formats[j]);
7960 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7961 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7962 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7963 }
7964 }
7965 }
7966 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7967 available_stall_durations.array(),
7968 available_stall_durations.size());
7969
7970 //QCAMERA3_OPAQUE_RAW
7971 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7972 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7973 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7974 case LEGACY_RAW:
7975 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7976 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7977 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7978 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7979 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7980 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7981 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7982 break;
7983 case MIPI_RAW:
7984 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7985 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7986 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7987 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7988 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7989 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7990 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7991 break;
7992 default:
7993 LOGE("unknown opaque_raw_format %d",
7994 gCamCapability[cameraId]->opaque_raw_fmt);
7995 break;
7996 }
7997 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7998
7999 Vector<int32_t> strides;
8000 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8001 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8002 cam_stream_buf_plane_info_t buf_planes;
8003 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8004 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8005 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8006 &gCamCapability[cameraId]->padding_info, &buf_planes);
8007 strides.add(buf_planes.plane_info.mp[0].stride);
8008 }
8009 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8010 strides.size());
8011
Thierry Strudel04e026f2016-10-10 11:27:36 -07008012 //Video HDR default
8013 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8014 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8015 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8016 int32_t vhdr_mode[] = {
8017 QCAMERA3_VIDEO_HDR_MODE_OFF,
8018 QCAMERA3_VIDEO_HDR_MODE_ON};
8019
8020 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8021 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8022 vhdr_mode, vhdr_mode_count);
8023 }
8024
Thierry Strudel3d639192016-09-09 11:52:26 -07008025 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8026 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8027 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8028
8029 uint8_t isMonoOnly =
8030 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8031 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8032 &isMonoOnly, 1);
8033
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008034#ifndef USE_HAL_3_3
8035 Vector<int32_t> opaque_size;
8036 for (size_t j = 0; j < scalar_formats_count; j++) {
8037 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8038 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8039 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8040 cam_stream_buf_plane_info_t buf_planes;
8041
8042 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8043 &gCamCapability[cameraId]->padding_info, &buf_planes);
8044
8045 if (rc == 0) {
8046 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8047 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8048 opaque_size.add(buf_planes.plane_info.frame_len);
8049 }else {
8050 LOGE("raw frame calculation failed!");
8051 }
8052 }
8053 }
8054 }
8055
8056 if ((opaque_size.size() > 0) &&
8057 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8058 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8059 else
8060 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8061#endif
8062
Thierry Strudel04e026f2016-10-10 11:27:36 -07008063 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
8064 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
8065 size = 0;
8066 count = CAM_IR_MODE_MAX;
8067 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
8068 for (size_t i = 0; i < count; i++) {
8069 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
8070 gCamCapability[cameraId]->supported_ir_modes[i]);
8071 if (NAME_NOT_FOUND != val) {
8072 avail_ir_modes[size] = (int32_t)val;
8073 size++;
8074 }
8075 }
8076 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
8077 avail_ir_modes, size);
8078 }
8079
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008080 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
8081 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
8082 size = 0;
8083 count = CAM_AEC_CONVERGENCE_MAX;
8084 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
8085 for (size_t i = 0; i < count; i++) {
8086 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
8087 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
8088 if (NAME_NOT_FOUND != val) {
8089 available_instant_aec_modes[size] = (int32_t)val;
8090 size++;
8091 }
8092 }
8093 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
8094 available_instant_aec_modes, size);
8095 }
8096
Thierry Strudel3d639192016-09-09 11:52:26 -07008097 gStaticMetadata[cameraId] = staticInfo.release();
8098 return rc;
8099}
8100
8101/*===========================================================================
8102 * FUNCTION : makeTable
8103 *
8104 * DESCRIPTION: make a table of sizes
8105 *
8106 * PARAMETERS :
8107 *
8108 *
8109 *==========================================================================*/
8110void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8111 size_t max_size, int32_t *sizeTable)
8112{
8113 size_t j = 0;
8114 if (size > max_size) {
8115 size = max_size;
8116 }
8117 for (size_t i = 0; i < size; i++) {
8118 sizeTable[j] = dimTable[i].width;
8119 sizeTable[j+1] = dimTable[i].height;
8120 j+=2;
8121 }
8122}
8123
8124/*===========================================================================
8125 * FUNCTION : makeFPSTable
8126 *
8127 * DESCRIPTION: make a table of fps ranges
8128 *
8129 * PARAMETERS :
8130 *
8131 *==========================================================================*/
8132void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8133 size_t max_size, int32_t *fpsRangesTable)
8134{
8135 size_t j = 0;
8136 if (size > max_size) {
8137 size = max_size;
8138 }
8139 for (size_t i = 0; i < size; i++) {
8140 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8141 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8142 j+=2;
8143 }
8144}
8145
8146/*===========================================================================
8147 * FUNCTION : makeOverridesList
8148 *
8149 * DESCRIPTION: make a list of scene mode overrides
8150 *
8151 * PARAMETERS :
8152 *
8153 *
8154 *==========================================================================*/
8155void QCamera3HardwareInterface::makeOverridesList(
8156 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
8157 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
8158{
8159 /*daemon will give a list of overrides for all scene modes.
8160 However we should send the fwk only the overrides for the scene modes
8161 supported by the framework*/
8162 size_t j = 0;
8163 if (size > max_size) {
8164 size = max_size;
8165 }
8166 size_t focus_count = CAM_FOCUS_MODE_MAX;
8167 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
8168 focus_count);
8169 for (size_t i = 0; i < size; i++) {
8170 bool supt = false;
8171 size_t index = supported_indexes[i];
8172 overridesList[j] = gCamCapability[camera_id]->flash_available ?
8173 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
8174 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8175 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8176 overridesTable[index].awb_mode);
8177 if (NAME_NOT_FOUND != val) {
8178 overridesList[j+1] = (uint8_t)val;
8179 }
8180 uint8_t focus_override = overridesTable[index].af_mode;
8181 for (size_t k = 0; k < focus_count; k++) {
8182 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
8183 supt = true;
8184 break;
8185 }
8186 }
8187 if (supt) {
8188 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8189 focus_override);
8190 if (NAME_NOT_FOUND != val) {
8191 overridesList[j+2] = (uint8_t)val;
8192 }
8193 } else {
8194 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
8195 }
8196 j+=3;
8197 }
8198}
8199
8200/*===========================================================================
8201 * FUNCTION : filterJpegSizes
8202 *
8203 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8204 * could be downscaled to
8205 *
8206 * PARAMETERS :
8207 *
8208 * RETURN : length of jpegSizes array
8209 *==========================================================================*/
8210
8211size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8212 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8213 uint8_t downscale_factor)
8214{
8215 if (0 == downscale_factor) {
8216 downscale_factor = 1;
8217 }
8218
8219 int32_t min_width = active_array_size.width / downscale_factor;
8220 int32_t min_height = active_array_size.height / downscale_factor;
8221 size_t jpegSizesCnt = 0;
8222 if (processedSizesCnt > maxCount) {
8223 processedSizesCnt = maxCount;
8224 }
8225 for (size_t i = 0; i < processedSizesCnt; i+=2) {
8226 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8227 jpegSizes[jpegSizesCnt] = processedSizes[i];
8228 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8229 jpegSizesCnt += 2;
8230 }
8231 }
8232 return jpegSizesCnt;
8233}
8234
8235/*===========================================================================
8236 * FUNCTION : computeNoiseModelEntryS
8237 *
8238 * DESCRIPTION: function to map a given sensitivity to the S noise
8239 * model parameters in the DNG noise model.
8240 *
8241 * PARAMETERS : sens : the sensor sensitivity
8242 *
8243 ** RETURN : S (sensor amplification) noise
8244 *
8245 *==========================================================================*/
8246double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8247 double s = gCamCapability[mCameraId]->gradient_S * sens +
8248 gCamCapability[mCameraId]->offset_S;
8249 return ((s < 0.0) ? 0.0 : s);
8250}
8251
8252/*===========================================================================
8253 * FUNCTION : computeNoiseModelEntryO
8254 *
8255 * DESCRIPTION: function to map a given sensitivity to the O noise
8256 * model parameters in the DNG noise model.
8257 *
8258 * PARAMETERS : sens : the sensor sensitivity
8259 *
8260 ** RETURN : O (sensor readout) noise
8261 *
8262 *==========================================================================*/
8263double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8264 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8265 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8266 1.0 : (1.0 * sens / max_analog_sens);
8267 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8268 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8269 return ((o < 0.0) ? 0.0 : o);
8270}
8271
8272/*===========================================================================
8273 * FUNCTION : getSensorSensitivity
8274 *
8275 * DESCRIPTION: convert iso_mode to an integer value
8276 *
8277 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8278 *
8279 ** RETURN : sensitivity supported by sensor
8280 *
8281 *==========================================================================*/
8282int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8283{
8284 int32_t sensitivity;
8285
8286 switch (iso_mode) {
8287 case CAM_ISO_MODE_100:
8288 sensitivity = 100;
8289 break;
8290 case CAM_ISO_MODE_200:
8291 sensitivity = 200;
8292 break;
8293 case CAM_ISO_MODE_400:
8294 sensitivity = 400;
8295 break;
8296 case CAM_ISO_MODE_800:
8297 sensitivity = 800;
8298 break;
8299 case CAM_ISO_MODE_1600:
8300 sensitivity = 1600;
8301 break;
8302 default:
8303 sensitivity = -1;
8304 break;
8305 }
8306 return sensitivity;
8307}
8308
8309/*===========================================================================
8310 * FUNCTION : getCamInfo
8311 *
8312 * DESCRIPTION: query camera capabilities
8313 *
8314 * PARAMETERS :
8315 * @cameraId : camera Id
8316 * @info : camera info struct to be filled in with camera capabilities
8317 *
8318 * RETURN : int type of status
8319 * NO_ERROR -- success
8320 * none-zero failure code
8321 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamCapability / gStaticMetadata are process-wide caches indexed by
    // camera id; gCamLock serializes their lazy one-time initialization.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Unlock before every early return so the global lock is never held.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Static metadata is derived from the capabilities, so it is filled second.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Collapse the four HAL sensor positions (main + aux, each side) onto the
    // two facings the framework understands.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
// Advertised HAL device version depends on the build-time HAL selection.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    // m is estimated from the largest advertised fps across all fps ranges.
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
8396
8397/*===========================================================================
8398 * FUNCTION : translateCapabilityToMetadata
8399 *
8400 * DESCRIPTION: translate the capability into camera_metadata_t
8401 *
8402 * PARAMETERS : type of the request
8403 *
8404 *
8405 * RETURN : success: camera_metadata_t*
8406 * failure: NULL
8407 *
8408 *==========================================================================*/
8409camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8410{
8411 if (mDefaultMetadata[type] != NULL) {
8412 return mDefaultMetadata[type];
8413 }
8414 //first time we are handling this request
8415 //fill up the metadata structure using the wrapper class
8416 CameraMetadata settings;
8417 //translate from cam_capability_t to camera_metadata_tag_t
8418 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8419 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8420 int32_t defaultRequestID = 0;
8421 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8422
8423 /* OIS disable */
8424 char ois_prop[PROPERTY_VALUE_MAX];
8425 memset(ois_prop, 0, sizeof(ois_prop));
8426 property_get("persist.camera.ois.disable", ois_prop, "0");
8427 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8428
8429 /* Force video to use OIS */
8430 char videoOisProp[PROPERTY_VALUE_MAX];
8431 memset(videoOisProp, 0, sizeof(videoOisProp));
8432 property_get("persist.camera.ois.video", videoOisProp, "1");
8433 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Thierry Strudel3d639192016-09-09 11:52:26 -07008434 uint8_t controlIntent = 0;
8435 uint8_t focusMode;
8436 uint8_t vsMode;
8437 uint8_t optStabMode;
8438 uint8_t cacMode;
8439 uint8_t edge_mode;
8440 uint8_t noise_red_mode;
8441 uint8_t tonemap_mode;
8442 bool highQualityModeEntryAvailable = FALSE;
8443 bool fastModeEntryAvailable = FALSE;
8444 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8445 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8446 switch (type) {
8447 case CAMERA3_TEMPLATE_PREVIEW:
8448 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8449 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8450 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8451 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8452 edge_mode = ANDROID_EDGE_MODE_FAST;
8453 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8454 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8455 break;
8456 case CAMERA3_TEMPLATE_STILL_CAPTURE:
8457 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8458 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8459 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8460 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8461 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8462 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8463 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8464 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8465 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8466 if (gCamCapability[mCameraId]->aberration_modes[i] ==
8467 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8468 highQualityModeEntryAvailable = TRUE;
8469 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8470 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8471 fastModeEntryAvailable = TRUE;
8472 }
8473 }
8474 if (highQualityModeEntryAvailable) {
8475 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8476 } else if (fastModeEntryAvailable) {
8477 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8478 }
8479 break;
8480 case CAMERA3_TEMPLATE_VIDEO_RECORD:
8481 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8482 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8483 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07008484 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8485 edge_mode = ANDROID_EDGE_MODE_FAST;
8486 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8487 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8488 if (forceVideoOis)
8489 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8490 break;
8491 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8492 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8493 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8494 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07008495 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8496 edge_mode = ANDROID_EDGE_MODE_FAST;
8497 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8498 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8499 if (forceVideoOis)
8500 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8501 break;
8502 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8503 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8504 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8505 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8506 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8507 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8508 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8509 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8510 break;
8511 case CAMERA3_TEMPLATE_MANUAL:
8512 edge_mode = ANDROID_EDGE_MODE_FAST;
8513 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8514 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8515 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8516 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8517 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8518 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8519 break;
8520 default:
8521 edge_mode = ANDROID_EDGE_MODE_FAST;
8522 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8523 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8524 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8525 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8526 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8527 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8528 break;
8529 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07008530 // Set CAC to OFF if underlying device doesn't support
8531 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8532 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8533 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008534 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8535 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8536 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8537 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8538 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8539 }
8540 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8541
8542 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8543 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8544 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8545 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8546 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8547 || ois_disable)
8548 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8549 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8550
8551 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8552 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8553
8554 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8555 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8556
8557 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8558 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8559
8560 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8561 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8562
8563 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8564 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8565
8566 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8567 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8568
8569 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8570 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8571
8572 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8573 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8574
8575 /*flash*/
8576 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8577 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8578
8579 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8580 settings.update(ANDROID_FLASH_FIRING_POWER,
8581 &flashFiringLevel, 1);
8582
8583 /* lens */
8584 float default_aperture = gCamCapability[mCameraId]->apertures[0];
8585 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8586
8587 if (gCamCapability[mCameraId]->filter_densities_count) {
8588 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8589 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8590 gCamCapability[mCameraId]->filter_densities_count);
8591 }
8592
8593 float default_focal_length = gCamCapability[mCameraId]->focal_length;
8594 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8595
8596 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8597 float default_focus_distance = 0;
8598 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8599 }
8600
8601 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8602 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8603
8604 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8605 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8606
8607 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8608 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8609
8610 /* face detection (default to OFF) */
8611 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8612 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8613
8614 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8615 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8616
8617 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8618 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8619
8620 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8621 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8622
8623 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8624 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8625
8626 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8627 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8628
8629 /* Exposure time(Update the Min Exposure Time)*/
8630 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8631 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8632
8633 /* frame duration */
8634 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8635 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8636
8637 /* sensitivity */
8638 static const int32_t default_sensitivity = 100;
8639 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008640#ifndef USE_HAL_3_3
8641 static const int32_t default_isp_sensitivity =
8642 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8643 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8644#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008645
8646 /*edge mode*/
8647 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8648
8649 /*noise reduction mode*/
8650 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8651
8652 /*color correction mode*/
8653 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8654 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8655
8656 /*transform matrix mode*/
8657 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8658
8659 int32_t scaler_crop_region[4];
8660 scaler_crop_region[0] = 0;
8661 scaler_crop_region[1] = 0;
8662 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8663 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8664 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8665
8666 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8667 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8668
8669 /*focus distance*/
8670 float focus_distance = 0.0;
8671 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8672
8673 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8674 float max_range = 0.0;
8675 float max_fixed_fps = 0.0;
8676 int32_t fps_range[2] = {0, 0};
8677 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8678 i++) {
8679 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8680 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8681 if (type == CAMERA3_TEMPLATE_PREVIEW ||
8682 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8683 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8684 if (range > max_range) {
8685 fps_range[0] =
8686 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8687 fps_range[1] =
8688 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8689 max_range = range;
8690 }
8691 } else {
8692 if (range < 0.01 && max_fixed_fps <
8693 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8694 fps_range[0] =
8695 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8696 fps_range[1] =
8697 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8698 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8699 }
8700 }
8701 }
8702 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8703
8704 /*precapture trigger*/
8705 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8706 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8707
8708 /*af trigger*/
8709 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8710 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8711
8712 /* ae & af regions */
8713 int32_t active_region[] = {
8714 gCamCapability[mCameraId]->active_array_size.left,
8715 gCamCapability[mCameraId]->active_array_size.top,
8716 gCamCapability[mCameraId]->active_array_size.left +
8717 gCamCapability[mCameraId]->active_array_size.width,
8718 gCamCapability[mCameraId]->active_array_size.top +
8719 gCamCapability[mCameraId]->active_array_size.height,
8720 0};
8721 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8722 sizeof(active_region) / sizeof(active_region[0]));
8723 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8724 sizeof(active_region) / sizeof(active_region[0]));
8725
8726 /* black level lock */
8727 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8728 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8729
8730 /* lens shading map mode */
8731 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8732 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8733 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8734 }
8735 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8736
8737 //special defaults for manual template
8738 if (type == CAMERA3_TEMPLATE_MANUAL) {
8739 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8740 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8741
8742 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8743 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8744
8745 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8746 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8747
8748 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8749 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8750
8751 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8752 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8753
8754 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8755 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8756 }
8757
8758
8759 /* TNR
8760 * We'll use this location to determine which modes TNR will be set.
8761 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8762 * This is not to be confused with linking on a per stream basis that decision
8763 * is still on per-session basis and will be handled as part of config stream
8764 */
8765 uint8_t tnr_enable = 0;
8766
8767 if (m_bTnrPreview || m_bTnrVideo) {
8768
8769 switch (type) {
8770 case CAMERA3_TEMPLATE_VIDEO_RECORD:
8771 tnr_enable = 1;
8772 break;
8773
8774 default:
8775 tnr_enable = 0;
8776 break;
8777 }
8778
8779 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8780 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8781 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8782
8783 LOGD("TNR:%d with process plate %d for template:%d",
8784 tnr_enable, tnr_process_type, type);
8785 }
8786
8787 //Update Link tags to default
8788 int32_t sync_type = CAM_TYPE_STANDALONE;
8789 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8790
8791 int32_t is_main = 0; //this doesn't matter as app should overwrite
8792 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8793
8794 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8795
8796 /* CDS default */
8797 char prop[PROPERTY_VALUE_MAX];
8798 memset(prop, 0, sizeof(prop));
8799 property_get("persist.camera.CDS", prop, "Auto");
8800 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8801 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8802 if (CAM_CDS_MODE_MAX == cds_mode) {
8803 cds_mode = CAM_CDS_MODE_AUTO;
8804 }
8805
8806 /* Disabling CDS in templates which have TNR enabled*/
8807 if (tnr_enable)
8808 cds_mode = CAM_CDS_MODE_OFF;
8809
8810 int32_t mode = cds_mode;
8811 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07008812
8813 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
8814 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
8815
8816 /* IR Mode Default Off */
8817 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
8818 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
8819
Thierry Strudel269c81a2016-10-12 12:13:59 -07008820 /* Manual Convergence AEC Speed is disabled by default*/
8821 float default_aec_speed = 0;
8822 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
8823
8824 /* Manual Convergence AWB Speed is disabled by default*/
8825 float default_awb_speed = 0;
8826 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
8827
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008828 // Set instant AEC to normal convergence by default
8829 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
8830 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
8831
Thierry Strudel3d639192016-09-09 11:52:26 -07008832 mDefaultMetadata[type] = settings.release();
8833
8834 return mDefaultMetadata[type];
8835}
8836
8837/*===========================================================================
8838 * FUNCTION : setFrameParameters
8839 *
8840 * DESCRIPTION: set parameters per frame as requested in the metadata from
8841 * framework
8842 *
8843 * PARAMETERS :
8844 * @request : request that needs to be serviced
8845 * @streamID : Stream ID of all the requested streams
8846 * @blob_request: Whether this request is a blob request or not
8847 *
8848 * RETURN : success: NO_ERROR
8849 * failure:
8850 *==========================================================================*/
8851int QCamera3HardwareInterface::setFrameParameters(
8852 camera3_capture_request_t *request,
8853 cam_stream_ID_t streamID,
8854 int blob_request,
8855 uint32_t snapshotStreamId)
8856{
8857 /*translate from camera_metadata_t type to parm_type_t*/
8858 int rc = 0;
8859 int32_t hal_version = CAM_HAL_V3;
8860
8861 clear_metadata_buffer(mParameters);
8862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8863 LOGE("Failed to set hal version in the parameters");
8864 return BAD_VALUE;
8865 }
8866
8867 /*we need to update the frame number in the parameters*/
8868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8869 request->frame_number)) {
8870 LOGE("Failed to set the frame number in the parameters");
8871 return BAD_VALUE;
8872 }
8873
8874 /* Update stream id of all the requested buffers */
8875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8876 LOGE("Failed to set stream type mask in the parameters");
8877 return BAD_VALUE;
8878 }
8879
8880 if (mUpdateDebugLevel) {
8881 uint32_t dummyDebugLevel = 0;
8882 /* The value of dummyDebugLevel is irrelavent. On
8883 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8884 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8885 dummyDebugLevel)) {
8886 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8887 return BAD_VALUE;
8888 }
8889 mUpdateDebugLevel = false;
8890 }
8891
8892 if(request->settings != NULL){
8893 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8894 if (blob_request)
8895 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8896 }
8897
8898 return rc;
8899}
8900
8901/*===========================================================================
8902 * FUNCTION : setReprocParameters
8903 *
8904 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8905 * return it.
8906 *
8907 * PARAMETERS :
8908 * @request : request that needs to be serviced
8909 *
8910 * RETURN : success: NO_ERROR
8911 * failure:
8912 *==========================================================================*/
8913int32_t QCamera3HardwareInterface::setReprocParameters(
8914 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8915 uint32_t snapshotStreamId)
8916{
8917 /*translate from camera_metadata_t type to parm_type_t*/
8918 int rc = 0;
8919
8920 if (NULL == request->settings){
8921 LOGE("Reprocess settings cannot be NULL");
8922 return BAD_VALUE;
8923 }
8924
8925 if (NULL == reprocParam) {
8926 LOGE("Invalid reprocessing metadata buffer");
8927 return BAD_VALUE;
8928 }
8929 clear_metadata_buffer(reprocParam);
8930
8931 /*we need to update the frame number in the parameters*/
8932 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8933 request->frame_number)) {
8934 LOGE("Failed to set the frame number in the parameters");
8935 return BAD_VALUE;
8936 }
8937
8938 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8939 if (rc < 0) {
8940 LOGE("Failed to translate reproc request");
8941 return rc;
8942 }
8943
8944 CameraMetadata frame_settings;
8945 frame_settings = request->settings;
8946 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8947 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8948 int32_t *crop_count =
8949 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8950 int32_t *crop_data =
8951 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8952 int32_t *roi_map =
8953 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8954 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8955 cam_crop_data_t crop_meta;
8956 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8957 crop_meta.num_of_streams = 1;
8958 crop_meta.crop_info[0].crop.left = crop_data[0];
8959 crop_meta.crop_info[0].crop.top = crop_data[1];
8960 crop_meta.crop_info[0].crop.width = crop_data[2];
8961 crop_meta.crop_info[0].crop.height = crop_data[3];
8962
8963 crop_meta.crop_info[0].roi_map.left =
8964 roi_map[0];
8965 crop_meta.crop_info[0].roi_map.top =
8966 roi_map[1];
8967 crop_meta.crop_info[0].roi_map.width =
8968 roi_map[2];
8969 crop_meta.crop_info[0].roi_map.height =
8970 roi_map[3];
8971
8972 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8973 rc = BAD_VALUE;
8974 }
8975 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8976 request->input_buffer->stream,
8977 crop_meta.crop_info[0].crop.left,
8978 crop_meta.crop_info[0].crop.top,
8979 crop_meta.crop_info[0].crop.width,
8980 crop_meta.crop_info[0].crop.height);
8981 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8982 request->input_buffer->stream,
8983 crop_meta.crop_info[0].roi_map.left,
8984 crop_meta.crop_info[0].roi_map.top,
8985 crop_meta.crop_info[0].roi_map.width,
8986 crop_meta.crop_info[0].roi_map.height);
8987 } else {
8988 LOGE("Invalid reprocess crop count %d!", *crop_count);
8989 }
8990 } else {
8991 LOGE("No crop data from matching output stream");
8992 }
8993
8994 /* These settings are not needed for regular requests so handle them specially for
8995 reprocess requests; information needed for EXIF tags */
8996 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8997 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8998 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8999 if (NAME_NOT_FOUND != val) {
9000 uint32_t flashMode = (uint32_t)val;
9001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9002 rc = BAD_VALUE;
9003 }
9004 } else {
9005 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9006 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9007 }
9008 } else {
9009 LOGH("No flash mode in reprocess settings");
9010 }
9011
9012 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9013 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9015 rc = BAD_VALUE;
9016 }
9017 } else {
9018 LOGH("No flash state in reprocess settings");
9019 }
9020
9021 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
9022 uint8_t *reprocessFlags =
9023 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
9024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
9025 *reprocessFlags)) {
9026 rc = BAD_VALUE;
9027 }
9028 }
9029
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009030 // Add metadata which reprocess needs
9031 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
9032 cam_reprocess_info_t *repro_info =
9033 (cam_reprocess_info_t *)frame_settings.find
9034 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -07009035 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009036 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009037 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009038 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009039 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009040 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009041 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009042 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009043 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009044 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -07009045 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009046 repro_info->pipeline_flip);
9047 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
9048 repro_info->af_roi);
9049 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
9050 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -07009051 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
9052 CAM_INTF_PARM_ROTATION metadata then has been added in
9053 translateToHalMetadata. HAL need to keep this new rotation
9054 metadata. Otherwise, the old rotation info saved in the vendor tag
9055 would be used */
9056 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
9057 CAM_INTF_PARM_ROTATION, reprocParam) {
9058 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
9059 } else {
9060 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009061 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009062 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009063 }
9064
9065 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
9066 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
9067 roi.width and roi.height would be the final JPEG size.
9068 For now, HAL only checks this for reprocess request */
9069 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
9070 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
9071 uint8_t *enable =
9072 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
9073 if (*enable == TRUE) {
9074 int32_t *crop_data =
9075 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
9076 cam_stream_crop_info_t crop_meta;
9077 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
9078 crop_meta.stream_id = 0;
9079 crop_meta.crop.left = crop_data[0];
9080 crop_meta.crop.top = crop_data[1];
9081 crop_meta.crop.width = crop_data[2];
9082 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009083 // The JPEG crop roi should match cpp output size
9084 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
9085 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
9086 crop_meta.roi_map.left = 0;
9087 crop_meta.roi_map.top = 0;
9088 crop_meta.roi_map.width = cpp_crop->crop.width;
9089 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07009090 }
9091 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
9092 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009093 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07009094 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009095 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
9096 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07009097 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009098 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
9099
9100 // Add JPEG scale information
9101 cam_dimension_t scale_dim;
9102 memset(&scale_dim, 0, sizeof(cam_dimension_t));
9103 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
9104 int32_t *roi =
9105 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
9106 scale_dim.width = roi[2];
9107 scale_dim.height = roi[3];
9108 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
9109 scale_dim);
9110 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
9111 scale_dim.width, scale_dim.height, mCameraId);
9112 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009113 }
9114 }
9115
9116 return rc;
9117}
9118
9119/*===========================================================================
9120 * FUNCTION : saveRequestSettings
9121 *
9122 * DESCRIPTION: Add any settings that might have changed to the request settings
9123 * and save the settings to be applied on the frame
9124 *
9125 * PARAMETERS :
9126 * @jpegMetadata : the extracted and/or modified jpeg metadata
9127 * @request : request with initial settings
9128 *
9129 * RETURN :
9130 * camera_metadata_t* : pointer to the saved request settings
9131 *==========================================================================*/
9132camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9133 const CameraMetadata &jpegMetadata,
9134 camera3_capture_request_t *request)
9135{
9136 camera_metadata_t *resultMetadata;
9137 CameraMetadata camMetadata;
9138 camMetadata = request->settings;
9139
9140 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9141 int32_t thumbnail_size[2];
9142 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9143 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9144 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9145 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9146 }
9147
9148 if (request->input_buffer != NULL) {
9149 uint8_t reprocessFlags = 1;
9150 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
9151 (uint8_t*)&reprocessFlags,
9152 sizeof(reprocessFlags));
9153 }
9154
9155 resultMetadata = camMetadata.release();
9156 return resultMetadata;
9157}
9158
/*===========================================================================
 * FUNCTION   : setHalFpsRange
 *
 * DESCRIPTION: set FPS range parameter
 *
 *
 * PARAMETERS :
 *   @settings    : Metadata from framework
 *   @hal_metadata: Metadata buffer
 *
 *
 * RETURN     : success: NO_ERROR
 *              failure:
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present;
    // translateToHalMetadata only calls this after an exists() check.
    fps_range.min_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // By default the sensor/video range mirrors the AE target range; it is
    // overridden below for constrained high-speed (HFR) sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR session: pin both min fps and the video range to the max fps
        // (see the table above) and translate max fps to a HAL HFR mode.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames to keep preview at PREVIEW_FPS_FOR_HFR,
                // capped at the maximum the pipeline supports.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
             }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

         }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally publish the (possibly HFR-adjusted) fps range itself.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9266
9267/*===========================================================================
9268 * FUNCTION : translateToHalMetadata
9269 *
9270 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9271 *
9272 *
9273 * PARAMETERS :
9274 * @request : request sent from framework
9275 *
9276 *
9277 * RETURN : success: NO_ERROR
9278 * failure:
9279 *==========================================================================*/
9280int QCamera3HardwareInterface::translateToHalMetadata
9281 (const camera3_capture_request_t *request,
9282 metadata_buffer_t *hal_metadata,
9283 uint32_t snapshotStreamId)
9284{
9285 int rc = 0;
9286 CameraMetadata frame_settings;
9287 frame_settings = request->settings;
9288
9289 /* Do not change the order of the following list unless you know what you are
9290 * doing.
9291 * The order is laid out in such a way that parameters in the front of the table
9292 * may be used to override the parameters later in the table. Examples are:
9293 * 1. META_MODE should precede AEC/AWB/AF MODE
9294 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
9295 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
9296 * 4. Any mode should precede it's corresponding settings
9297 */
9298 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9299 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9301 rc = BAD_VALUE;
9302 }
9303 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9304 if (rc != NO_ERROR) {
9305 LOGE("extractSceneMode failed");
9306 }
9307 }
9308
9309 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9310 uint8_t fwk_aeMode =
9311 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9312 uint8_t aeMode;
9313 int32_t redeye;
9314
9315 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9316 aeMode = CAM_AE_MODE_OFF;
9317 } else {
9318 aeMode = CAM_AE_MODE_ON;
9319 }
9320 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9321 redeye = 1;
9322 } else {
9323 redeye = 0;
9324 }
9325
9326 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9327 fwk_aeMode);
9328 if (NAME_NOT_FOUND != val) {
9329 int32_t flashMode = (int32_t)val;
9330 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9331 }
9332
9333 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9335 rc = BAD_VALUE;
9336 }
9337 }
9338
9339 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9340 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9341 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9342 fwk_whiteLevel);
9343 if (NAME_NOT_FOUND != val) {
9344 uint8_t whiteLevel = (uint8_t)val;
9345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9346 rc = BAD_VALUE;
9347 }
9348 }
9349 }
9350
9351 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9352 uint8_t fwk_cacMode =
9353 frame_settings.find(
9354 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9355 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9356 fwk_cacMode);
9357 if (NAME_NOT_FOUND != val) {
9358 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9359 bool entryAvailable = FALSE;
9360 // Check whether Frameworks set CAC mode is supported in device or not
9361 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9362 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9363 entryAvailable = TRUE;
9364 break;
9365 }
9366 }
9367 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9368 // If entry not found then set the device supported mode instead of frameworks mode i.e,
9369 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9370 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9371 if (entryAvailable == FALSE) {
9372 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9373 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9374 } else {
9375 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9376 // High is not supported and so set the FAST as spec say's underlying
9377 // device implementation can be the same for both modes.
9378 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9379 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9380 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9381 // in order to avoid the fps drop due to high quality
9382 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9383 } else {
9384 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9385 }
9386 }
9387 }
9388 LOGD("Final cacMode is %d", cacMode);
9389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9390 rc = BAD_VALUE;
9391 }
9392 } else {
9393 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9394 }
9395 }
9396
9397 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9398 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9399 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9400 fwk_focusMode);
9401 if (NAME_NOT_FOUND != val) {
9402 uint8_t focusMode = (uint8_t)val;
9403 LOGD("set focus mode %d", focusMode);
9404 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9405 rc = BAD_VALUE;
9406 }
9407 }
9408 }
9409
9410 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9411 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9413 focalDistance)) {
9414 rc = BAD_VALUE;
9415 }
9416 }
9417
9418 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9419 uint8_t fwk_antibandingMode =
9420 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9421 int val = lookupHalName(ANTIBANDING_MODES_MAP,
9422 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9423 if (NAME_NOT_FOUND != val) {
9424 uint32_t hal_antibandingMode = (uint32_t)val;
9425 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9426 hal_antibandingMode)) {
9427 rc = BAD_VALUE;
9428 }
9429 }
9430 }
9431
9432 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9433 int32_t expCompensation = frame_settings.find(
9434 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9435 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9436 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9437 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9438 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9439 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9440 expCompensation)) {
9441 rc = BAD_VALUE;
9442 }
9443 }
9444
9445 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9446 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9447 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9448 rc = BAD_VALUE;
9449 }
9450 }
9451 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9452 rc = setHalFpsRange(frame_settings, hal_metadata);
9453 if (rc != NO_ERROR) {
9454 LOGE("setHalFpsRange failed");
9455 }
9456 }
9457
9458 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9459 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9460 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9461 rc = BAD_VALUE;
9462 }
9463 }
9464
9465 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9466 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9467 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9468 fwk_effectMode);
9469 if (NAME_NOT_FOUND != val) {
9470 uint8_t effectMode = (uint8_t)val;
9471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9472 rc = BAD_VALUE;
9473 }
9474 }
9475 }
9476
9477 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9478 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9479 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9480 colorCorrectMode)) {
9481 rc = BAD_VALUE;
9482 }
9483 }
9484
9485 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9486 cam_color_correct_gains_t colorCorrectGains;
9487 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
9488 colorCorrectGains.gains[i] =
9489 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9490 }
9491 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9492 colorCorrectGains)) {
9493 rc = BAD_VALUE;
9494 }
9495 }
9496
9497 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9498 cam_color_correct_matrix_t colorCorrectTransform;
9499 cam_rational_type_t transform_elem;
9500 size_t num = 0;
9501 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9502 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9503 transform_elem.numerator =
9504 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9505 transform_elem.denominator =
9506 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9507 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9508 num++;
9509 }
9510 }
9511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9512 colorCorrectTransform)) {
9513 rc = BAD_VALUE;
9514 }
9515 }
9516
9517 cam_trigger_t aecTrigger;
9518 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9519 aecTrigger.trigger_id = -1;
9520 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9521 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9522 aecTrigger.trigger =
9523 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9524 aecTrigger.trigger_id =
9525 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9526 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9527 aecTrigger)) {
9528 rc = BAD_VALUE;
9529 }
9530 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9531 aecTrigger.trigger, aecTrigger.trigger_id);
9532 }
9533
9534 /*af_trigger must come with a trigger id*/
9535 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9536 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9537 cam_trigger_t af_trigger;
9538 af_trigger.trigger =
9539 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9540 af_trigger.trigger_id =
9541 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9543 rc = BAD_VALUE;
9544 }
9545 LOGD("AfTrigger: %d AfTriggerID: %d",
9546 af_trigger.trigger, af_trigger.trigger_id);
9547 }
9548
9549 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9550 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9551 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9552 rc = BAD_VALUE;
9553 }
9554 }
9555 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9556 cam_edge_application_t edge_application;
9557 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9558 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9559 edge_application.sharpness = 0;
9560 } else {
9561 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9562 }
9563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9564 rc = BAD_VALUE;
9565 }
9566 }
9567
9568 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9569 int32_t respectFlashMode = 1;
9570 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9571 uint8_t fwk_aeMode =
9572 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9573 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9574 respectFlashMode = 0;
9575 LOGH("AE Mode controls flash, ignore android.flash.mode");
9576 }
9577 }
9578 if (respectFlashMode) {
9579 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9580 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9581 LOGH("flash mode after mapping %d", val);
9582 // To check: CAM_INTF_META_FLASH_MODE usage
9583 if (NAME_NOT_FOUND != val) {
9584 uint8_t flashMode = (uint8_t)val;
9585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9586 rc = BAD_VALUE;
9587 }
9588 }
9589 }
9590 }
9591
9592 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9593 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9595 rc = BAD_VALUE;
9596 }
9597 }
9598
9599 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9600 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9602 flashFiringTime)) {
9603 rc = BAD_VALUE;
9604 }
9605 }
9606
9607 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9608 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9610 hotPixelMode)) {
9611 rc = BAD_VALUE;
9612 }
9613 }
9614
9615 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9616 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9617 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9618 lensAperture)) {
9619 rc = BAD_VALUE;
9620 }
9621 }
9622
9623 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9624 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9626 filterDensity)) {
9627 rc = BAD_VALUE;
9628 }
9629 }
9630
9631 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9632 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9634 focalLength)) {
9635 rc = BAD_VALUE;
9636 }
9637 }
9638
9639 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9640 uint8_t optStabMode =
9641 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9643 optStabMode)) {
9644 rc = BAD_VALUE;
9645 }
9646 }
9647
9648 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9649 uint8_t videoStabMode =
9650 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9651 LOGD("videoStabMode from APP = %d", videoStabMode);
9652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9653 videoStabMode)) {
9654 rc = BAD_VALUE;
9655 }
9656 }
9657
9658
9659 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9660 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9661 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9662 noiseRedMode)) {
9663 rc = BAD_VALUE;
9664 }
9665 }
9666
9667 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9668 float reprocessEffectiveExposureFactor =
9669 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9671 reprocessEffectiveExposureFactor)) {
9672 rc = BAD_VALUE;
9673 }
9674 }
9675
9676 cam_crop_region_t scalerCropRegion;
9677 bool scalerCropSet = false;
9678 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9679 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9680 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9681 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9682 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9683
9684 // Map coordinate system from active array to sensor output.
9685 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9686 scalerCropRegion.width, scalerCropRegion.height);
9687
9688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9689 scalerCropRegion)) {
9690 rc = BAD_VALUE;
9691 }
9692 scalerCropSet = true;
9693 }
9694
9695 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9696 int64_t sensorExpTime =
9697 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9698 LOGD("setting sensorExpTime %lld", sensorExpTime);
9699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9700 sensorExpTime)) {
9701 rc = BAD_VALUE;
9702 }
9703 }
9704
9705 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9706 int64_t sensorFrameDuration =
9707 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9708 int64_t minFrameDuration = getMinFrameDuration(request);
9709 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9710 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9711 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9712 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9713 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9714 sensorFrameDuration)) {
9715 rc = BAD_VALUE;
9716 }
9717 }
9718
9719 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9720 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9721 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9722 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9723 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9724 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9725 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9727 sensorSensitivity)) {
9728 rc = BAD_VALUE;
9729 }
9730 }
9731
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009732#ifndef USE_HAL_3_3
9733 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9734 int32_t ispSensitivity =
9735 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9736 if (ispSensitivity <
9737 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9738 ispSensitivity =
9739 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9740 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9741 }
9742 if (ispSensitivity >
9743 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9744 ispSensitivity =
9745 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9746 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9747 }
9748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9749 ispSensitivity)) {
9750 rc = BAD_VALUE;
9751 }
9752 }
9753#endif
9754
Thierry Strudel3d639192016-09-09 11:52:26 -07009755 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9756 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9758 rc = BAD_VALUE;
9759 }
9760 }
9761
9762 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9763 uint8_t fwk_facedetectMode =
9764 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9765
9766 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9767 fwk_facedetectMode);
9768
9769 if (NAME_NOT_FOUND != val) {
9770 uint8_t facedetectMode = (uint8_t)val;
9771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9772 facedetectMode)) {
9773 rc = BAD_VALUE;
9774 }
9775 }
9776 }
9777
9778 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9779 uint8_t histogramMode =
9780 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9782 histogramMode)) {
9783 rc = BAD_VALUE;
9784 }
9785 }
9786
9787 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9788 uint8_t sharpnessMapMode =
9789 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9791 sharpnessMapMode)) {
9792 rc = BAD_VALUE;
9793 }
9794 }
9795
9796 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9797 uint8_t tonemapMode =
9798 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9799 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9800 rc = BAD_VALUE;
9801 }
9802 }
9803 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9804 /*All tonemap channels will have the same number of points*/
9805 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9806 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9807 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9808 cam_rgb_tonemap_curves tonemapCurves;
9809 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9810 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9811 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9812 tonemapCurves.tonemap_points_cnt,
9813 CAM_MAX_TONEMAP_CURVE_SIZE);
9814 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9815 }
9816
9817 /* ch0 = G*/
9818 size_t point = 0;
9819 cam_tonemap_curve_t tonemapCurveGreen;
9820 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9821 for (size_t j = 0; j < 2; j++) {
9822 tonemapCurveGreen.tonemap_points[i][j] =
9823 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9824 point++;
9825 }
9826 }
9827 tonemapCurves.curves[0] = tonemapCurveGreen;
9828
9829 /* ch 1 = B */
9830 point = 0;
9831 cam_tonemap_curve_t tonemapCurveBlue;
9832 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9833 for (size_t j = 0; j < 2; j++) {
9834 tonemapCurveBlue.tonemap_points[i][j] =
9835 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9836 point++;
9837 }
9838 }
9839 tonemapCurves.curves[1] = tonemapCurveBlue;
9840
9841 /* ch 2 = R */
9842 point = 0;
9843 cam_tonemap_curve_t tonemapCurveRed;
9844 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9845 for (size_t j = 0; j < 2; j++) {
9846 tonemapCurveRed.tonemap_points[i][j] =
9847 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9848 point++;
9849 }
9850 }
9851 tonemapCurves.curves[2] = tonemapCurveRed;
9852
9853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9854 tonemapCurves)) {
9855 rc = BAD_VALUE;
9856 }
9857 }
9858
9859 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9860 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9862 captureIntent)) {
9863 rc = BAD_VALUE;
9864 }
9865 }
9866
9867 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9868 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9870 blackLevelLock)) {
9871 rc = BAD_VALUE;
9872 }
9873 }
9874
9875 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9876 uint8_t lensShadingMapMode =
9877 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9879 lensShadingMapMode)) {
9880 rc = BAD_VALUE;
9881 }
9882 }
9883
9884 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9885 cam_area_t roi;
9886 bool reset = true;
9887 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9888
9889 // Map coordinate system from active array to sensor output.
9890 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9891 roi.rect.height);
9892
9893 if (scalerCropSet) {
9894 reset = resetIfNeededROI(&roi, &scalerCropRegion);
9895 }
9896 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9897 rc = BAD_VALUE;
9898 }
9899 }
9900
9901 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9902 cam_area_t roi;
9903 bool reset = true;
9904 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9905
9906 // Map coordinate system from active array to sensor output.
9907 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9908 roi.rect.height);
9909
9910 if (scalerCropSet) {
9911 reset = resetIfNeededROI(&roi, &scalerCropRegion);
9912 }
9913 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9914 rc = BAD_VALUE;
9915 }
9916 }
9917
9918 // CDS for non-HFR non-video mode
9919 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9920 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9921 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9922 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9923 LOGE("Invalid CDS mode %d!", *fwk_cds);
9924 } else {
9925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9926 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9927 rc = BAD_VALUE;
9928 }
9929 }
9930 }
9931
Thierry Strudel04e026f2016-10-10 11:27:36 -07009932 // Video HDR
9933 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
9934 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
9935 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
9936 rc = setVideoHdrMode(mParameters, vhdr);
9937 if (rc != NO_ERROR) {
9938 LOGE("setVideoHDR is failed");
9939 }
9940 }
9941
9942 //IR
9943 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
9944 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
9945 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
9946 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
9947 LOGE("Invalid IR mode %d!", fwk_ir);
9948 } else {
9949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9950 CAM_INTF_META_IR_MODE, fwk_ir)) {
9951 rc = BAD_VALUE;
9952 }
9953 }
9954 }
9955
Thierry Strudel269c81a2016-10-12 12:13:59 -07009956 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
9957 float aec_speed;
9958 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
9959 LOGD("AEC Speed :%f", aec_speed);
9960 if ( aec_speed < 0 ) {
9961 LOGE("Invalid AEC mode %f!", aec_speed);
9962 } else {
9963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
9964 aec_speed)) {
9965 rc = BAD_VALUE;
9966 }
9967 }
9968 }
9969
9970 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
9971 float awb_speed;
9972 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
9973 LOGD("AWB Speed :%f", awb_speed);
9974 if ( awb_speed < 0 ) {
9975 LOGE("Invalid AWB mode %f!", awb_speed);
9976 } else {
9977 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
9978 awb_speed)) {
9979 rc = BAD_VALUE;
9980 }
9981 }
9982 }
9983
Thierry Strudel3d639192016-09-09 11:52:26 -07009984 // TNR
9985 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9986 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9987 uint8_t b_TnrRequested = 0;
9988 cam_denoise_param_t tnr;
9989 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9990 tnr.process_plates =
9991 (cam_denoise_process_type_t)frame_settings.find(
9992 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9993 b_TnrRequested = tnr.denoise_enable;
9994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9995 rc = BAD_VALUE;
9996 }
9997 }
9998
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009999 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10000 int32_t* exposure_metering_mode =
10001 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10003 *exposure_metering_mode)) {
10004 rc = BAD_VALUE;
10005 }
10006 }
10007
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10009 int32_t fwk_testPatternMode =
10010 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10011 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10012 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10013
10014 if (NAME_NOT_FOUND != testPatternMode) {
10015 cam_test_pattern_data_t testPatternData;
10016 memset(&testPatternData, 0, sizeof(testPatternData));
10017 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10018 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10019 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10020 int32_t *fwk_testPatternData =
10021 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10022 testPatternData.r = fwk_testPatternData[0];
10023 testPatternData.b = fwk_testPatternData[3];
10024 switch (gCamCapability[mCameraId]->color_arrangement) {
10025 case CAM_FILTER_ARRANGEMENT_RGGB:
10026 case CAM_FILTER_ARRANGEMENT_GRBG:
10027 testPatternData.gr = fwk_testPatternData[1];
10028 testPatternData.gb = fwk_testPatternData[2];
10029 break;
10030 case CAM_FILTER_ARRANGEMENT_GBRG:
10031 case CAM_FILTER_ARRANGEMENT_BGGR:
10032 testPatternData.gr = fwk_testPatternData[2];
10033 testPatternData.gb = fwk_testPatternData[1];
10034 break;
10035 default:
10036 LOGE("color arrangement %d is not supported",
10037 gCamCapability[mCameraId]->color_arrangement);
10038 break;
10039 }
10040 }
10041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
10042 testPatternData)) {
10043 rc = BAD_VALUE;
10044 }
10045 } else {
10046 LOGE("Invalid framework sensor test pattern mode %d",
10047 fwk_testPatternMode);
10048 }
10049 }
10050
10051 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
10052 size_t count = 0;
10053 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
10054 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
10055 gps_coords.data.d, gps_coords.count, count);
10056 if (gps_coords.count != count) {
10057 rc = BAD_VALUE;
10058 }
10059 }
10060
10061 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
10062 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
10063 size_t count = 0;
10064 const char *gps_methods_src = (const char *)
10065 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
10066 memset(gps_methods, '\0', sizeof(gps_methods));
10067 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
10068 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
10069 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
10070 if (GPS_PROCESSING_METHOD_SIZE != count) {
10071 rc = BAD_VALUE;
10072 }
10073 }
10074
10075 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
10076 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
10077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
10078 gps_timestamp)) {
10079 rc = BAD_VALUE;
10080 }
10081 }
10082
10083 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
10084 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
10085 cam_rotation_info_t rotation_info;
10086 if (orientation == 0) {
10087 rotation_info.rotation = ROTATE_0;
10088 } else if (orientation == 90) {
10089 rotation_info.rotation = ROTATE_90;
10090 } else if (orientation == 180) {
10091 rotation_info.rotation = ROTATE_180;
10092 } else if (orientation == 270) {
10093 rotation_info.rotation = ROTATE_270;
10094 }
10095 rotation_info.streamId = snapshotStreamId;
10096 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
10097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
10098 rc = BAD_VALUE;
10099 }
10100 }
10101
10102 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
10103 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
10104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
10105 rc = BAD_VALUE;
10106 }
10107 }
10108
10109 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
10110 uint32_t thumb_quality = (uint32_t)
10111 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
10112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
10113 thumb_quality)) {
10114 rc = BAD_VALUE;
10115 }
10116 }
10117
10118 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10119 cam_dimension_t dim;
10120 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10121 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10122 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
10123 rc = BAD_VALUE;
10124 }
10125 }
10126
10127 // Internal metadata
10128 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
10129 size_t count = 0;
10130 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
10131 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
10132 privatedata.data.i32, privatedata.count, count);
10133 if (privatedata.count != count) {
10134 rc = BAD_VALUE;
10135 }
10136 }
10137
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010138 // ISO/Exposure Priority
10139 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
10140 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
10141 cam_priority_mode_t mode =
10142 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
10143 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
10144 cam_intf_parm_manual_3a_t use_iso_exp_pty;
10145 use_iso_exp_pty.previewOnly = FALSE;
10146 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
10147 use_iso_exp_pty.value = *ptr;
10148
10149 if(CAM_ISO_PRIORITY == mode) {
10150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
10151 use_iso_exp_pty)) {
10152 rc = BAD_VALUE;
10153 }
10154 }
10155 else {
10156 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
10157 use_iso_exp_pty)) {
10158 rc = BAD_VALUE;
10159 }
10160 }
10161 }
10162 }
10163
10164 // Saturation
10165 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
10166 int32_t* use_saturation =
10167 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
10168 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
10169 rc = BAD_VALUE;
10170 }
10171 }
10172
Thierry Strudel3d639192016-09-09 11:52:26 -070010173 // EV step
10174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
10175 gCamCapability[mCameraId]->exp_compensation_step)) {
10176 rc = BAD_VALUE;
10177 }
10178
10179 // CDS info
10180 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
10181 cam_cds_data_t *cdsData = (cam_cds_data_t *)
10182 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
10183
10184 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10185 CAM_INTF_META_CDS_DATA, *cdsData)) {
10186 rc = BAD_VALUE;
10187 }
10188 }
10189
10190 return rc;
10191}
10192
10193/*===========================================================================
10194 * FUNCTION : captureResultCb
10195 *
10196 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
10197 *
10198 * PARAMETERS :
10199 * @frame : frame information from mm-camera-interface
10200 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
10201 * @userdata: userdata
10202 *
10203 * RETURN : NONE
10204 *==========================================================================*/
10205void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
10206 camera3_stream_buffer_t *buffer,
10207 uint32_t frame_number, bool isInputBuffer, void *userdata)
10208{
10209 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10210 if (hw == NULL) {
10211 LOGE("Invalid hw %p", hw);
10212 return;
10213 }
10214
10215 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10216 return;
10217}
10218
10219
10220/*===========================================================================
10221 * FUNCTION : initialize
10222 *
10223 * DESCRIPTION: Pass framework callback pointers to HAL
10224 *
10225 * PARAMETERS :
10226 *
10227 *
10228 * RETURN : Success : 0
10229 * Failure: -ENODEV
10230 *==========================================================================*/
10231
10232int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10233 const camera3_callback_ops_t *callback_ops)
10234{
10235 LOGD("E");
10236 QCamera3HardwareInterface *hw =
10237 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10238 if (!hw) {
10239 LOGE("NULL camera device");
10240 return -ENODEV;
10241 }
10242
10243 int rc = hw->initialize(callback_ops);
10244 LOGD("X");
10245 return rc;
10246}
10247
10248/*===========================================================================
10249 * FUNCTION : configure_streams
10250 *
10251 * DESCRIPTION:
10252 *
10253 * PARAMETERS :
10254 *
10255 *
10256 * RETURN : Success: 0
10257 * Failure: -EINVAL (if stream configuration is invalid)
10258 * -ENODEV (fatal error)
10259 *==========================================================================*/
10260
10261int QCamera3HardwareInterface::configure_streams(
10262 const struct camera3_device *device,
10263 camera3_stream_configuration_t *stream_list)
10264{
10265 LOGD("E");
10266 QCamera3HardwareInterface *hw =
10267 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10268 if (!hw) {
10269 LOGE("NULL camera device");
10270 return -ENODEV;
10271 }
10272 int rc = hw->configureStreams(stream_list);
10273 LOGD("X");
10274 return rc;
10275}
10276
10277/*===========================================================================
10278 * FUNCTION : construct_default_request_settings
10279 *
10280 * DESCRIPTION: Configure a settings buffer to meet the required use case
10281 *
10282 * PARAMETERS :
10283 *
10284 *
10285 * RETURN : Success: Return valid metadata
10286 * Failure: Return NULL
10287 *==========================================================================*/
10288const camera_metadata_t* QCamera3HardwareInterface::
10289 construct_default_request_settings(const struct camera3_device *device,
10290 int type)
10291{
10292
10293 LOGD("E");
10294 camera_metadata_t* fwk_metadata = NULL;
10295 QCamera3HardwareInterface *hw =
10296 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10297 if (!hw) {
10298 LOGE("NULL camera device");
10299 return NULL;
10300 }
10301
10302 fwk_metadata = hw->translateCapabilityToMetadata(type);
10303
10304 LOGD("X");
10305 return fwk_metadata;
10306}
10307
10308/*===========================================================================
10309 * FUNCTION : process_capture_request
10310 *
10311 * DESCRIPTION:
10312 *
10313 * PARAMETERS :
10314 *
10315 *
10316 * RETURN :
10317 *==========================================================================*/
10318int QCamera3HardwareInterface::process_capture_request(
10319 const struct camera3_device *device,
10320 camera3_capture_request_t *request)
10321{
10322 LOGD("E");
10323 QCamera3HardwareInterface *hw =
10324 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10325 if (!hw) {
10326 LOGE("NULL camera device");
10327 return -EINVAL;
10328 }
10329
10330 int rc = hw->processCaptureRequest(request);
10331 LOGD("X");
10332 return rc;
10333}
10334
10335/*===========================================================================
10336 * FUNCTION : dump
10337 *
10338 * DESCRIPTION:
10339 *
10340 * PARAMETERS :
10341 *
10342 *
10343 * RETURN :
10344 *==========================================================================*/
10345
10346void QCamera3HardwareInterface::dump(
10347 const struct camera3_device *device, int fd)
10348{
10349 /* Log level property is read when "adb shell dumpsys media.camera" is
10350 called so that the log level can be controlled without restarting
10351 the media server */
10352 getLogLevel();
10353
10354 LOGD("E");
10355 QCamera3HardwareInterface *hw =
10356 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10357 if (!hw) {
10358 LOGE("NULL camera device");
10359 return;
10360 }
10361
10362 hw->dump(fd);
10363 LOGD("X");
10364 return;
10365}
10366
10367/*===========================================================================
10368 * FUNCTION : flush
10369 *
10370 * DESCRIPTION:
10371 *
10372 * PARAMETERS :
10373 *
10374 *
10375 * RETURN :
10376 *==========================================================================*/
10377
10378int QCamera3HardwareInterface::flush(
10379 const struct camera3_device *device)
10380{
10381 int rc;
10382 LOGD("E");
10383 QCamera3HardwareInterface *hw =
10384 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10385 if (!hw) {
10386 LOGE("NULL camera device");
10387 return -EINVAL;
10388 }
10389
10390 pthread_mutex_lock(&hw->mMutex);
10391 // Validate current state
10392 switch (hw->mState) {
10393 case STARTED:
10394 /* valid state */
10395 break;
10396
10397 case ERROR:
10398 pthread_mutex_unlock(&hw->mMutex);
10399 hw->handleCameraDeviceError();
10400 return -ENODEV;
10401
10402 default:
10403 LOGI("Flush returned during state %d", hw->mState);
10404 pthread_mutex_unlock(&hw->mMutex);
10405 return 0;
10406 }
10407 pthread_mutex_unlock(&hw->mMutex);
10408
10409 rc = hw->flush(true /* restart channels */ );
10410 LOGD("X");
10411 return rc;
10412}
10413
10414/*===========================================================================
10415 * FUNCTION : close_camera_device
10416 *
10417 * DESCRIPTION:
10418 *
10419 * PARAMETERS :
10420 *
10421 *
10422 * RETURN :
10423 *==========================================================================*/
10424int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10425{
10426 int ret = NO_ERROR;
10427 QCamera3HardwareInterface *hw =
10428 reinterpret_cast<QCamera3HardwareInterface *>(
10429 reinterpret_cast<camera3_device_t *>(device)->priv);
10430 if (!hw) {
10431 LOGE("NULL camera device");
10432 return BAD_VALUE;
10433 }
10434
10435 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10436 delete hw;
10437 LOGI("[KPI Perf]: X");
10438 return ret;
10439}
10440
10441/*===========================================================================
10442 * FUNCTION : getWaveletDenoiseProcessPlate
10443 *
10444 * DESCRIPTION: query wavelet denoise process plate
10445 *
10446 * PARAMETERS : None
10447 *
10448 * RETURN : WNR prcocess plate value
10449 *==========================================================================*/
10450cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10451{
10452 char prop[PROPERTY_VALUE_MAX];
10453 memset(prop, 0, sizeof(prop));
10454 property_get("persist.denoise.process.plates", prop, "0");
10455 int processPlate = atoi(prop);
10456 switch(processPlate) {
10457 case 0:
10458 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10459 case 1:
10460 return CAM_WAVELET_DENOISE_CBCR_ONLY;
10461 case 2:
10462 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10463 case 3:
10464 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10465 default:
10466 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10467 }
10468}
10469
10470
10471/*===========================================================================
10472 * FUNCTION : getTemporalDenoiseProcessPlate
10473 *
10474 * DESCRIPTION: query temporal denoise process plate
10475 *
10476 * PARAMETERS : None
10477 *
10478 * RETURN : TNR prcocess plate value
10479 *==========================================================================*/
10480cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10481{
10482 char prop[PROPERTY_VALUE_MAX];
10483 memset(prop, 0, sizeof(prop));
10484 property_get("persist.tnr.process.plates", prop, "0");
10485 int processPlate = atoi(prop);
10486 switch(processPlate) {
10487 case 0:
10488 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10489 case 1:
10490 return CAM_WAVELET_DENOISE_CBCR_ONLY;
10491 case 2:
10492 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10493 case 3:
10494 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10495 default:
10496 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10497 }
10498}
10499
10500
10501/*===========================================================================
10502 * FUNCTION : extractSceneMode
10503 *
10504 * DESCRIPTION: Extract scene mode from frameworks set metadata
10505 *
10506 * PARAMETERS :
10507 * @frame_settings: CameraMetadata reference
10508 * @metaMode: ANDROID_CONTORL_MODE
10509 * @hal_metadata: hal metadata structure
10510 *
10511 * RETURN : None
10512 *==========================================================================*/
10513int32_t QCamera3HardwareInterface::extractSceneMode(
10514 const CameraMetadata &frame_settings, uint8_t metaMode,
10515 metadata_buffer_t *hal_metadata)
10516{
10517 int32_t rc = NO_ERROR;
10518
10519 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10520 camera_metadata_ro_entry entry =
10521 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10522 if (0 == entry.count)
10523 return rc;
10524
10525 uint8_t fwk_sceneMode = entry.data.u8[0];
10526
10527 int val = lookupHalName(SCENE_MODES_MAP,
10528 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10529 fwk_sceneMode);
10530 if (NAME_NOT_FOUND != val) {
10531 uint8_t sceneMode = (uint8_t)val;
10532 LOGD("sceneMode: %d", sceneMode);
10533 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10534 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10535 rc = BAD_VALUE;
10536 }
10537 }
10538 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10539 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10540 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10541 LOGD("sceneMode: %d", sceneMode);
10542 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10543 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10544 rc = BAD_VALUE;
10545 }
10546 }
10547 return rc;
10548}
10549
/*===========================================================================
 * FUNCTION   : setVideoHdrMode
 *
 * DESCRIPTION: Set Video HDR mode from frameworks set metadata, selecting the
 *              sensor HDR type based on capability and a debug property.
 *
 * PARAMETERS :
 *      @hal_metadata: hal metadata structure to receive the sensor HDR type
 *      @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              BAD_VALUE -- invalid mode or no supported HDR type found
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setVideoHdrMode(
        metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
{
    int32_t rc = NO_ERROR;
    // Reject out-of-range modes up front.
    if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
        LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
        rc = BAD_VALUE;
    } else {
        cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
        if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
            LOGD("Setting HDR mode Off");
            vhdr_type = CAM_SENSOR_HDR_OFF;
        } else {
            // HDR requested: the concrete sensor HDR type is chosen by the
            // persist.camera.hdr.video property (default "3"), gated by the
            // camera's advertised feature mask.
            char video_hdr_prop[PROPERTY_VALUE_MAX];
            memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
            property_get("persist.camera.hdr.video", video_hdr_prop, "3");
            uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
                LOGD("Setting HDR mode In Sensor");
                vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
                LOGD("Setting HDR mode Zigzag");
                vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
                LOGD("Setting HDR mode Staggered");
                vhdr_type = CAM_SENSOR_HDR_STAGGERED;
            }
            // No capability/property combination matched: report failure.
            if(vhdr_type == CAM_SENSOR_HDR_MAX) {
                LOGD("HDR mode not supported");
                rc = BAD_VALUE;
            }
        }
        // Only push the parameter when a valid HDR type was resolved.
        if(rc == NO_ERROR) {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
                rc = BAD_VALUE;
            }
        }
    }
    return rc;
}
10610
10611/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070010612 * FUNCTION : needRotationReprocess
10613 *
10614 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10615 *
10616 * PARAMETERS : none
10617 *
10618 * RETURN : true: needed
10619 * false: no need
10620 *==========================================================================*/
10621bool QCamera3HardwareInterface::needRotationReprocess()
10622{
10623 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10624 // current rotation is not zero, and pp has the capability to process rotation
10625 LOGH("need do reprocess for rotation");
10626 return true;
10627 }
10628
10629 return false;
10630}
10631
10632/*===========================================================================
10633 * FUNCTION : needReprocess
10634 *
10635 * DESCRIPTION: if reprocess in needed
10636 *
10637 * PARAMETERS : none
10638 *
10639 * RETURN : true: needed
10640 * false: no need
10641 *==========================================================================*/
10642bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10643{
10644 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10645 // TODO: add for ZSL HDR later
10646 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10647 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10648 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10649 return true;
10650 } else {
10651 LOGH("already post processed frame");
10652 return false;
10653 }
10654 }
10655 return needRotationReprocess();
10656}
10657
10658/*===========================================================================
10659 * FUNCTION : needJpegExifRotation
10660 *
10661 * DESCRIPTION: if rotation from jpeg is needed
10662 *
10663 * PARAMETERS : none
10664 *
10665 * RETURN : true: needed
10666 * false: no need
10667 *==========================================================================*/
10668bool QCamera3HardwareInterface::needJpegExifRotation()
10669{
10670 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10671 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10672 LOGD("Need use Jpeg EXIF Rotation");
10673 return true;
10674 }
10675 return false;
10676}
10677
/*===========================================================================
 * FUNCTION   : addOfflineReprocChannel
 *
 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
 *              coming from input channel
 *
 * PARAMETERS :
 *      @config : reprocess configuration
 *      @inputChHandle : pointer to the input (source) channel
 *
 *
 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    // NOTE(review): plain `new` throws std::bad_alloc rather than returning
    // NULL, so this check only fires if the project builds with a
    // non-throwing allocator — confirm against build flags.
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Strip rotation from the feature set if the PP block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    // On failure the partially-initialized channel is destroyed here so the
    // caller never sees a half-built channel.
    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
10738
10739/*===========================================================================
10740 * FUNCTION : getMobicatMask
10741 *
10742 * DESCRIPTION: returns mobicat mask
10743 *
10744 * PARAMETERS : none
10745 *
10746 * RETURN : mobicat mask
10747 *
10748 *==========================================================================*/
10749uint8_t QCamera3HardwareInterface::getMobicatMask()
10750{
10751 return m_MobicatMask;
10752}
10753
10754/*===========================================================================
10755 * FUNCTION : setMobicat
10756 *
10757 * DESCRIPTION: set Mobicat on/off.
10758 *
10759 * PARAMETERS :
10760 * @params : none
10761 *
10762 * RETURN : int32_t type of status
10763 * NO_ERROR -- success
10764 * none-zero failure code
10765 *==========================================================================*/
10766int32_t QCamera3HardwareInterface::setMobicat()
10767{
10768 char value [PROPERTY_VALUE_MAX];
10769 property_get("persist.camera.mobicat", value, "0");
10770 int32_t ret = NO_ERROR;
10771 uint8_t enableMobi = (uint8_t)atoi(value);
10772
10773 if (enableMobi) {
10774 tune_cmd_t tune_cmd;
10775 tune_cmd.type = SET_RELOAD_CHROMATIX;
10776 tune_cmd.module = MODULE_ALL;
10777 tune_cmd.value = TRUE;
10778 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10779 CAM_INTF_PARM_SET_VFE_COMMAND,
10780 tune_cmd);
10781
10782 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10783 CAM_INTF_PARM_SET_PP_COMMAND,
10784 tune_cmd);
10785 }
10786 m_MobicatMask = enableMobi;
10787
10788 return ret;
10789}
10790
10791/*===========================================================================
10792* FUNCTION : getLogLevel
10793*
10794* DESCRIPTION: Reads the log level property into a variable
10795*
10796* PARAMETERS :
10797* None
10798*
10799* RETURN :
10800* None
10801*==========================================================================*/
10802void QCamera3HardwareInterface::getLogLevel()
10803{
10804 char prop[PROPERTY_VALUE_MAX];
10805 uint32_t globalLogLevel = 0;
10806
10807 property_get("persist.camera.hal.debug", prop, "0");
10808 int val = atoi(prop);
10809 if (0 <= val) {
10810 gCamHal3LogLevel = (uint32_t)val;
10811 }
10812
10813 property_get("persist.camera.kpi.debug", prop, "1");
10814 gKpiDebugLevel = atoi(prop);
10815
10816 property_get("persist.camera.global.debug", prop, "0");
10817 val = atoi(prop);
10818 if (0 <= val) {
10819 globalLogLevel = (uint32_t)val;
10820 }
10821
10822 /* Highest log level among hal.logs and global.logs is selected */
10823 if (gCamHal3LogLevel < globalLogLevel)
10824 gCamHal3LogLevel = globalLogLevel;
10825
10826 return;
10827}
10828
10829/*===========================================================================
10830 * FUNCTION : validateStreamRotations
10831 *
10832 * DESCRIPTION: Check if the rotations requested are supported
10833 *
10834 * PARAMETERS :
10835 * @stream_list : streams to be configured
10836 *
10837 * RETURN : NO_ERROR on success
10838 * -EINVAL on failure
10839 *
10840 *==========================================================================*/
10841int QCamera3HardwareInterface::validateStreamRotations(
10842 camera3_stream_configuration_t *streamList)
10843{
10844 int rc = NO_ERROR;
10845
10846 /*
10847 * Loop through all streams requested in configuration
10848 * Check if unsupported rotations have been requested on any of them
10849 */
10850 for (size_t j = 0; j < streamList->num_streams; j++){
10851 camera3_stream_t *newStream = streamList->streams[j];
10852
10853 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10854 bool isImplDef = (newStream->format ==
10855 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10856 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10857 isImplDef);
10858
10859 if (isRotated && (!isImplDef || isZsl)) {
10860 LOGE("Error: Unsupported rotation of %d requested for stream"
10861 "type:%d and stream format:%d",
10862 newStream->rotation, newStream->stream_type,
10863 newStream->format);
10864 rc = -EINVAL;
10865 break;
10866 }
10867 }
10868
10869 return rc;
10870}
10871
10872/*===========================================================================
10873* FUNCTION : getFlashInfo
10874*
10875* DESCRIPTION: Retrieve information about whether the device has a flash.
10876*
10877* PARAMETERS :
10878* @cameraId : Camera id to query
10879* @hasFlash : Boolean indicating whether there is a flash device
10880* associated with given camera
10881* @flashNode : If a flash device exists, this will be its device node.
10882*
10883* RETURN :
10884* None
10885*==========================================================================*/
10886void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10887 bool& hasFlash,
10888 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10889{
10890 cam_capability_t* camCapability = gCamCapability[cameraId];
10891 if (NULL == camCapability) {
10892 hasFlash = false;
10893 flashNode[0] = '\0';
10894 } else {
10895 hasFlash = camCapability->flash_available;
10896 strlcpy(flashNode,
10897 (char*)camCapability->flash_dev_name,
10898 QCAMERA_MAX_FILEPATH_LENGTH);
10899 }
10900}
10901
10902/*===========================================================================
10903* FUNCTION : getEepromVersionInfo
10904*
10905* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10906*
10907* PARAMETERS : None
10908*
10909* RETURN : string describing EEPROM version
10910* "\0" if no such info available
10911*==========================================================================*/
10912const char *QCamera3HardwareInterface::getEepromVersionInfo()
10913{
10914 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
10915}
10916
10917/*===========================================================================
10918* FUNCTION : getLdafCalib
10919*
10920* DESCRIPTION: Retrieve Laser AF calibration data
10921*
10922* PARAMETERS : None
10923*
10924* RETURN : Two uint32_t describing laser AF calibration data
10925* NULL if none is available.
10926*==========================================================================*/
10927const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10928{
10929 if (mLdafCalibExist) {
10930 return &mLdafCalib[0];
10931 } else {
10932 return NULL;
10933 }
10934}
10935
10936/*===========================================================================
10937 * FUNCTION : dynamicUpdateMetaStreamInfo
10938 *
10939 * DESCRIPTION: This function:
10940 * (1) stops all the channels
10941 * (2) returns error on pending requests and buffers
10942 * (3) sends metastream_info in setparams
10943 * (4) starts all channels
10944 * This is useful when sensor has to be restarted to apply any
10945 * settings such as frame rate from a different sensor mode
10946 *
10947 * PARAMETERS : None
10948 *
10949 * RETURN : NO_ERROR on success
10950 * Error codes on failure
10951 *
10952 *==========================================================================*/
10953int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
10954{
10955 ATRACE_CALL();
10956 int rc = NO_ERROR;
10957
10958 LOGD("E");
10959
10960 rc = stopAllChannels();
10961 if (rc < 0) {
10962 LOGE("stopAllChannels failed");
10963 return rc;
10964 }
10965
10966 rc = notifyErrorForPendingRequests();
10967 if (rc < 0) {
10968 LOGE("notifyErrorForPendingRequests failed");
10969 return rc;
10970 }
10971
10972 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
10973 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
10974 "Format:%d",
10975 mStreamConfigInfo.type[i],
10976 mStreamConfigInfo.stream_sizes[i].width,
10977 mStreamConfigInfo.stream_sizes[i].height,
10978 mStreamConfigInfo.postprocess_mask[i],
10979 mStreamConfigInfo.format[i]);
10980 }
10981
10982 /* Send meta stream info once again so that ISP can start */
10983 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
10984 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
10985 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
10986 mParameters);
10987 if (rc < 0) {
10988 LOGE("set Metastreaminfo failed. Sensor mode does not change");
10989 }
10990
10991 rc = startAllChannels();
10992 if (rc < 0) {
10993 LOGE("startAllChannels failed");
10994 return rc;
10995 }
10996
10997 LOGD("X");
10998 return rc;
10999}
11000
11001/*===========================================================================
11002 * FUNCTION : stopAllChannels
11003 *
11004 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
11005 *
11006 * PARAMETERS : None
11007 *
11008 * RETURN : NO_ERROR on success
11009 * Error codes on failure
11010 *
11011 *==========================================================================*/
11012int32_t QCamera3HardwareInterface::stopAllChannels()
11013{
11014 int32_t rc = NO_ERROR;
11015
11016 LOGD("Stopping all channels");
11017 // Stop the Streams/Channels
11018 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11019 it != mStreamInfo.end(); it++) {
11020 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11021 if (channel) {
11022 channel->stop();
11023 }
11024 (*it)->status = INVALID;
11025 }
11026
11027 if (mSupportChannel) {
11028 mSupportChannel->stop();
11029 }
11030 if (mAnalysisChannel) {
11031 mAnalysisChannel->stop();
11032 }
11033 if (mRawDumpChannel) {
11034 mRawDumpChannel->stop();
11035 }
11036 if (mMetadataChannel) {
11037 /* If content of mStreamInfo is not 0, there is metadata stream */
11038 mMetadataChannel->stop();
11039 }
11040
11041 LOGD("All channels stopped");
11042 return rc;
11043}
11044
11045/*===========================================================================
11046 * FUNCTION : startAllChannels
11047 *
11048 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
11049 *
11050 * PARAMETERS : None
11051 *
11052 * RETURN : NO_ERROR on success
11053 * Error codes on failure
11054 *
11055 *==========================================================================*/
11056int32_t QCamera3HardwareInterface::startAllChannels()
11057{
11058 int32_t rc = NO_ERROR;
11059
11060 LOGD("Start all channels ");
11061 // Start the Streams/Channels
11062 if (mMetadataChannel) {
11063 /* If content of mStreamInfo is not 0, there is metadata stream */
11064 rc = mMetadataChannel->start();
11065 if (rc < 0) {
11066 LOGE("META channel start failed");
11067 return rc;
11068 }
11069 }
11070 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11071 it != mStreamInfo.end(); it++) {
11072 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11073 if (channel) {
11074 rc = channel->start();
11075 if (rc < 0) {
11076 LOGE("channel start failed");
11077 return rc;
11078 }
11079 }
11080 }
11081 if (mAnalysisChannel) {
11082 mAnalysisChannel->start();
11083 }
11084 if (mSupportChannel) {
11085 rc = mSupportChannel->start();
11086 if (rc < 0) {
11087 LOGE("Support channel start failed");
11088 return rc;
11089 }
11090 }
11091 if (mRawDumpChannel) {
11092 rc = mRawDumpChannel->start();
11093 if (rc < 0) {
11094 LOGE("RAW dump channel start failed");
11095 return rc;
11096 }
11097 }
11098
11099 LOGD("All channels started");
11100 return rc;
11101}
11102
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the oldest pending request; buffers for frames older than
    // it have already had their metadata delivered and get ERROR_BUFFER,
    // everything else gets ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
          frameNum);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            // NOTE(review): plain `new[]` throws on failure rather than
            // returning NULL — this check only fires with a non-throwing
            // allocator; confirm against build flags.
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            // One ERROR_BUFFER notify per buffer; each buffer is returned
            // with CAMERA3_BUFFER_STATUS_ERROR in a single capture result.
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // NOTE(review): i is always the head of mPendingRequestsList
            // here — this assumes the pending-buffer requests and pending
            // requests advance in lockstep; verify the lists cannot get
            // out of sync (and that i is not end() when the list is empty).
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                    info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            mCallbackOps->process_capture_result(mCallbackOps, &result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
11251
11252bool QCamera3HardwareInterface::isOnEncoder(
11253 const cam_dimension_t max_viewfinder_size,
11254 uint32_t width, uint32_t height)
11255{
11256 return (width > (uint32_t)max_viewfinder_size.width ||
11257 height > (uint32_t)max_viewfinder_size.height);
11258}
11259
11260/*===========================================================================
11261 * FUNCTION : setBundleInfo
11262 *
11263 * DESCRIPTION: Set bundle info for all streams that are bundle.
11264 *
11265 * PARAMETERS : None
11266 *
11267 * RETURN : NO_ERROR on success
11268 * Error codes on failure
11269 *==========================================================================*/
11270int32_t QCamera3HardwareInterface::setBundleInfo()
11271{
11272 int32_t rc = NO_ERROR;
11273
11274 if (mChannelHandle) {
11275 cam_bundle_config_t bundleInfo;
11276 memset(&bundleInfo, 0, sizeof(bundleInfo));
11277 rc = mCameraHandle->ops->get_bundle_info(
11278 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
11279 if (rc != NO_ERROR) {
11280 LOGE("get_bundle_info failed");
11281 return rc;
11282 }
11283 if (mAnalysisChannel) {
11284 mAnalysisChannel->setBundleInfo(bundleInfo);
11285 }
11286 if (mSupportChannel) {
11287 mSupportChannel->setBundleInfo(bundleInfo);
11288 }
11289 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11290 it != mStreamInfo.end(); it++) {
11291 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11292 channel->setBundleInfo(bundleInfo);
11293 }
11294 if (mRawDumpChannel) {
11295 mRawDumpChannel->setBundleInfo(bundleInfo);
11296 }
11297 }
11298
11299 return rc;
11300}
11301
/*===========================================================================
 * FUNCTION   : setInstantAEC
 *
 * DESCRIPTION: Set Instant AEC related params.
 *
 * PARAMETERS :
 *      @meta: CameraMetadata reference
 *
 * RETURN     : NO_ERROR on success
 *              Error codes on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
{
    int32_t rc = NO_ERROR;
    uint8_t val = 0;
    char prop[PROPERTY_VALUE_MAX];

    // First try to configure instant AEC from framework metadata
    if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
        val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
    }

    // If framework did not set this value, try to read from set prop.
    // NOTE(review): a framework-supplied value of 0 is indistinguishable
    // from "not set" here and falls through to the property — confirm 0 is
    // not a meaningful framework setting.
    if (val == 0) {
        memset(prop, 0, sizeof(prop));
        property_get("persist.camera.instant.aec", prop, "0");
        val = (uint8_t)atoi(prop);
    }

    // Accept only values inside the valid convergence-mode range.
    if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
            ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
        mInstantAEC = val;
        // Reset per-session instant-AEC bookkeeping.
        mInstantAECSettledFrameNumber = 0;
        mInstantAecFrameIdxCount = 0;
        LOGH("instantAEC value set %d",val);
        if (mInstantAEC) {
            // Number of frames to skip on display while AEC converges,
            // configurable via property (default 10).
            memset(prop, 0, sizeof(prop));
            property_get("persist.camera.ae.instant.bound", prop, "10");
            int32_t aec_frame_skip_cnt = atoi(prop);
            if (aec_frame_skip_cnt >= 0) {
                mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
            } else {
                LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
                rc = BAD_VALUE;
            }
        }
    } else {
        LOGE("Bad instant aec value set %d", val);
        rc = BAD_VALUE;
    }
    return rc;
}
11355
11356/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011357 * FUNCTION : get_num_overall_buffers
11358 *
11359 * DESCRIPTION: Estimate number of pending buffers across all requests.
11360 *
11361 * PARAMETERS : None
11362 *
11363 * RETURN : Number of overall pending buffers
11364 *
11365 *==========================================================================*/
11366uint32_t PendingBuffersMap::get_num_overall_buffers()
11367{
11368 uint32_t sum_buffers = 0;
11369 for (auto &req : mPendingBuffersInRequest) {
11370 sum_buffers += req.mPendingBufferList.size();
11371 }
11372 return sum_buffers;
11373}
11374
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Search every pending request for the first entry whose buffer handle
    // matches; at most one entry is removed per call.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // erase() invalidates k; safe only because we break out of
                // the inner loop immediately below.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    // req may now be end(); safe because buffer_found makes
                    // the outer loop break before the next req++.
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
11411
11412/*===========================================================================
11413 * FUNCTION : setPAAFSupport
11414 *
11415 * DESCRIPTION: Set the preview-assisted auto focus support bit in
11416 * feature mask according to stream type and filter
11417 * arrangement
11418 *
11419 * PARAMETERS : @feature_mask: current feature mask, which may be modified
11420 * @stream_type: stream type
11421 * @filter_arrangement: filter arrangement
11422 *
11423 * RETURN : None
11424 *==========================================================================*/
11425void QCamera3HardwareInterface::setPAAFSupport(
11426 cam_feature_mask_t& feature_mask,
11427 cam_stream_type_t stream_type,
11428 cam_color_filter_arrangement_t filter_arrangement)
11429{
11430 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11431 feature_mask, stream_type, filter_arrangement);
11432
11433 switch (filter_arrangement) {
11434 case CAM_FILTER_ARRANGEMENT_RGGB:
11435 case CAM_FILTER_ARRANGEMENT_GRBG:
11436 case CAM_FILTER_ARRANGEMENT_GBRG:
11437 case CAM_FILTER_ARRANGEMENT_BGGR:
11438 if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
11439 (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11440 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11441 feature_mask |= CAM_QCOM_FEATURE_PAAF;
11442 }
11443 break;
11444 case CAM_FILTER_ARRANGEMENT_Y:
11445 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11446 feature_mask |= CAM_QCOM_FEATURE_PAAF;
11447 }
11448 break;
11449 default:
11450 break;
11451 }
11452}
11453
11454/*===========================================================================
11455* FUNCTION : getSensorMountAngle
11456*
11457* DESCRIPTION: Retrieve sensor mount angle
11458*
11459* PARAMETERS : None
11460*
11461* RETURN : sensor mount angle in uint32_t
11462*==========================================================================*/
11463uint32_t QCamera3HardwareInterface::getSensorMountAngle()
11464{
11465 return gCamCapability[mCameraId]->sensor_mount_angle;
11466}
11467
11468/*===========================================================================
11469* FUNCTION : getRelatedCalibrationData
11470*
11471* DESCRIPTION: Retrieve related system calibration data
11472*
11473* PARAMETERS : None
11474*
11475* RETURN : Pointer of related system calibration data
11476*==========================================================================*/
11477const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
11478{
11479 return (const cam_related_system_calibration_data_t *)
11480 &(gCamCapability[mCameraId]->related_cam_calibration);
11481}
11482}; //end namespace qcamera