blob: 03fdf419de6c33d412edf80cffb87fab27248c2c [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
45#include <sync/sync.h>
46#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080047#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070048
49// Display dependencies
50#include "qdMetaData.h"
51
52// Camera dependencies
53#include "android/QCamera3External.h"
54#include "util/QCameraFlash.h"
55#include "QCamera3HWI.h"
56#include "QCamera3VendorTags.h"
57#include "QCameraTrace.h"
58
59extern "C" {
60#include "mm_camera_dbg.h"
61}
62
63using namespace android;
64
65namespace qcamera {
66
// Convenience accessor: pointer to buffer INDEX inside a camera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Saturation values for common sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) is applied.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream-count limits.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Metering region tuple: xmin, ymin, xmax, ymax, weight.
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized map/table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features HAL3 may request from the PP block.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
119
// Per-sensor capability and static-metadata caches, indexed by camera id.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL3 log verbosity; read via getLogLevel().
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Property-string -> CDS (chroma down-sampling) mode lookup.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// ---------------------------------------------------------------------------
// Translation tables between camera_metadata (framework) enum values and the
// HAL/backend (cam_*) enum values. Used in both directions by the metadata
// translation code; see METADATA_MAP_SIZE for iteration.
// ---------------------------------------------------------------------------

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// NOTE: AF_MODE_OFF appears twice (OFF and FIXED) — reverse lookups return the
// first match, i.e. CAM_FOCUS_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// AE mode -> flash behaviour (both ON variants with flash map to AUTO flash).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// JPEG thumbnail sizes advertised to the framework, as (width, height) pairs;
// the leading 0,0 entry means "no thumbnail".
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested FPS -> HFR (high frame rate) capture mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};
// camera3_device_ops vtable handed to the framework; register_stream_buffers
// and get_metadata_vendor_tag_ops are NULL because they are deprecated in
// device API >= 3.2 (vendor tags go through the module-level API instead).
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
351
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to safe defaults, fills in the camera3_device_t that
 *              the framework will use, creates the synchronization
 *              primitives, and reads the persist.camera.* debug/tuning
 *              properties. No hardware is opened here; that happens in
 *              openCamera().
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module-level callbacks (stored, used for torch/flash events)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Fill the camera3_device_t handed back to the framework in openCamera().
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) toggles for preview and video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the Adreno GPU's preferred surface stride alignment so buffer
    // strides match what the GPU expects; fall back to 32-pixel padding if
    // the library or symbol is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
}
491
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears the session
 *              down in a strict order: unlink dual-cam bundling, stop every
 *              stream/channel, then delete the channels, send the final
 *              "unconfigure" to the backend, close the camera, and finally
 *              release pending request bookkeeping and sync primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock guards the global sessionId[] table.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: all streams are stopped, now it is safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // mPictureChannel is owned via mStreamInfo and was deleted above;
    // only the pointer is cleared here.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop any request bookkeeping; erasePendingRequest also frees the
    // per-request input buffer and settings.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
653
654/*===========================================================================
655 * FUNCTION : erasePendingRequest
656 *
657 * DESCRIPTION: function to erase a desired pending request after freeing any
658 * allocated memory
659 *
660 * PARAMETERS :
661 * @i : iterator pointing to pending request to be erased
662 *
663 * RETURN : iterator pointing to the next request
664 *==========================================================================*/
665QCamera3HardwareInterface::pendingRequestIterator
666 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
667{
668 if (i->input_buffer != NULL) {
669 free(i->input_buffer);
670 i->input_buffer = NULL;
671 }
672 if (i->settings != NULL)
673 free_camera_metadata((camera_metadata_t*)i->settings);
674 return mPendingRequestsList.erase(i);
675}
676
677/*===========================================================================
678 * FUNCTION : camEvtHandle
679 *
680 * DESCRIPTION: Function registered to mm-camera-interface to handle events
681 *
682 * PARAMETERS :
683 * @camera_handle : interface layer camera handle
684 * @evt : ptr to event
685 * @user_data : user data ptr
686 *
687 * RETURN : none
688 *==========================================================================*/
689void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
690 mm_camera_event_t *evt,
691 void *user_data)
692{
693 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
694 if (obj && evt) {
695 switch(evt->server_event_type) {
696 case CAM_EVENT_TYPE_DAEMON_DIED:
697 pthread_mutex_lock(&obj->mMutex);
698 obj->mState = ERROR;
699 pthread_mutex_unlock(&obj->mMutex);
700 LOGE("Fatal, camera daemon died");
701 break;
702
703 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
704 LOGD("HAL got request pull from Daemon");
705 pthread_mutex_lock(&obj->mMutex);
706 obj->mWokenUpByDaemon = true;
707 obj->unblockRequestIfNecessary();
708 pthread_mutex_unlock(&obj->mMutex);
709 break;
710
711 default:
712 LOGW("Warning: Unhandled event %d",
713 evt->server_event_type);
714 break;
715 }
716 } else {
717 LOGE("NULL user_data/evt");
718 }
719}
720
721/*===========================================================================
722 * FUNCTION : openCamera
723 *
724 * DESCRIPTION: open camera
725 *
726 * PARAMETERS :
727 * @hw_device : double ptr for camera device struct
728 *
729 * RETURN : int32_t type of status
730 * NO_ERROR -- success
731 * none-zero failure code
732 *==========================================================================*/
733int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
734{
735 int rc = 0;
736 if (mState != CLOSED) {
737 *hw_device = NULL;
738 return PERMISSION_DENIED;
739 }
740
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800741 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700742 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
743 mCameraId);
744
745 rc = openCamera();
746 if (rc == 0) {
747 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800748 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700749 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800750 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700751
Thierry Strudel3d639192016-09-09 11:52:26 -0700752 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
753 mCameraId, rc);
754
755 if (rc == NO_ERROR) {
756 mState = OPENED;
757 }
758 return rc;
759}
760
761/*===========================================================================
762 * FUNCTION : openCamera
763 *
764 * DESCRIPTION: open camera
765 *
766 * PARAMETERS : none
767 *
768 * RETURN : int32_t type of status
769 * NO_ERROR -- success
770 * none-zero failure code
771 *==========================================================================*/
772int QCamera3HardwareInterface::openCamera()
773{
774 int rc = 0;
775 char value[PROPERTY_VALUE_MAX];
776
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800777 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700778 if (mCameraHandle) {
779 LOGE("Failure: Camera already opened");
780 return ALREADY_EXISTS;
781 }
782
783 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
784 if (rc < 0) {
785 LOGE("Failed to reserve flash for camera id: %d",
786 mCameraId);
787 return UNKNOWN_ERROR;
788 }
789
790 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
791 if (rc) {
792 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
793 return rc;
794 }
795
796 if (!mCameraHandle) {
797 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
798 return -ENODEV;
799 }
800
801 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
802 camEvtHandle, (void *)this);
803
804 if (rc < 0) {
805 LOGE("Error, failed to register event callback");
806 /* Not closing camera here since it is already handled in destructor */
807 return FAILED_TRANSACTION;
808 }
809
810 mExifParams.debug_params =
811 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
812 if (mExifParams.debug_params) {
813 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
814 } else {
815 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
816 return NO_MEMORY;
817 }
818 mFirstConfiguration = true;
819
820 //Notify display HAL that a camera session is active.
821 //But avoid calling the same during bootup because camera service might open/close
822 //cameras at boot time during its initialization and display service will also internally
823 //wait for camera service to initialize first while calling this display API, resulting in a
824 //deadlock situation. Since boot time camera open/close calls are made only to fetch
825 //capabilities, no need of this display bw optimization.
826 //Use "service.bootanim.exit" property to know boot status.
827 property_get("service.bootanim.exit", value, "0");
828 if (atoi(value) == 1) {
829 pthread_mutex_lock(&gCamLock);
830 if (gNumCameraSessions++ == 0) {
831 setCameraLaunchStatus(true);
832 }
833 pthread_mutex_unlock(&gCamLock);
834 }
835
836 //fill the session id needed while linking dual cam
837 pthread_mutex_lock(&gCamLock);
838 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
839 &sessionId[mCameraId]);
840 pthread_mutex_unlock(&gCamLock);
841
842 if (rc < 0) {
843 LOGE("Error, failed to get sessiion id");
844 return UNKNOWN_ERROR;
845 } else {
846 //Allocate related cam sync buffer
847 //this is needed for the payload that goes along with bundling cmd for related
848 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700849 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
850 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700851 if(rc != OK) {
852 rc = NO_MEMORY;
853 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
854 return NO_MEMORY;
855 }
856
857 //Map memory for related cam sync buffer
858 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700859 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
860 m_pDualCamCmdHeap->getFd(0),
861 sizeof(cam_dual_camera_cmd_info_t),
862 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700863 if(rc < 0) {
864 LOGE("Dualcam: failed to map Related cam sync buffer");
865 rc = FAILED_TRANSACTION;
866 return NO_MEMORY;
867 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700868 m_pDualCamCmdPtr =
869 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700870 }
871
872 LOGH("mCameraId=%d",mCameraId);
873
874 return NO_ERROR;
875}
876
877/*===========================================================================
878 * FUNCTION : closeCamera
879 *
880 * DESCRIPTION: close camera
881 *
882 * PARAMETERS : none
883 *
884 * RETURN : int32_t type of status
885 * NO_ERROR -- success
886 * none-zero failure code
887 *==========================================================================*/
888int QCamera3HardwareInterface::closeCamera()
889{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800890 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700891 int rc = NO_ERROR;
892 char value[PROPERTY_VALUE_MAX];
893
894 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
895 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700896
897 // unmap memory for related cam sync buffer
898 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800899 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700900 if (NULL != m_pDualCamCmdHeap) {
901 m_pDualCamCmdHeap->deallocate();
902 delete m_pDualCamCmdHeap;
903 m_pDualCamCmdHeap = NULL;
904 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700905 }
906
Thierry Strudel3d639192016-09-09 11:52:26 -0700907 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
908 mCameraHandle = NULL;
909
910 //reset session id to some invalid id
911 pthread_mutex_lock(&gCamLock);
912 sessionId[mCameraId] = 0xDEADBEEF;
913 pthread_mutex_unlock(&gCamLock);
914
915 //Notify display HAL that there is no active camera session
916 //but avoid calling the same during bootup. Refer to openCamera
917 //for more details.
918 property_get("service.bootanim.exit", value, "0");
919 if (atoi(value) == 1) {
920 pthread_mutex_lock(&gCamLock);
921 if (--gNumCameraSessions == 0) {
922 setCameraLaunchStatus(false);
923 }
924 pthread_mutex_unlock(&gCamLock);
925 }
926
Thierry Strudel3d639192016-09-09 11:52:26 -0700927 if (mExifParams.debug_params) {
928 free(mExifParams.debug_params);
929 mExifParams.debug_params = NULL;
930 }
931 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
932 LOGW("Failed to release flash for camera id: %d",
933 mCameraId);
934 }
935 mState = CLOSED;
936 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
937 mCameraId, rc);
938 return rc;
939}
940
941/*===========================================================================
942 * FUNCTION : initialize
943 *
944 * DESCRIPTION: Initialize frameworks callback functions
945 *
946 * PARAMETERS :
947 * @callback_ops : callback function to frameworks
948 *
949 * RETURN :
950 *
951 *==========================================================================*/
952int QCamera3HardwareInterface::initialize(
953 const struct camera3_callback_ops *callback_ops)
954{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800955 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700956 int rc;
957
958 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
959 pthread_mutex_lock(&mMutex);
960
961 // Validate current state
962 switch (mState) {
963 case OPENED:
964 /* valid state */
965 break;
966 default:
967 LOGE("Invalid state %d", mState);
968 rc = -ENODEV;
969 goto err1;
970 }
971
972 rc = initParameters();
973 if (rc < 0) {
974 LOGE("initParamters failed %d", rc);
975 goto err1;
976 }
977 mCallbackOps = callback_ops;
978
979 mChannelHandle = mCameraHandle->ops->add_channel(
980 mCameraHandle->camera_handle, NULL, NULL, this);
981 if (mChannelHandle == 0) {
982 LOGE("add_channel failed");
983 rc = -ENOMEM;
984 pthread_mutex_unlock(&mMutex);
985 return rc;
986 }
987
988 pthread_mutex_unlock(&mMutex);
989 mCameraInitialized = true;
990 mState = INITIALIZED;
991 LOGI("X");
992 return 0;
993
994err1:
995 pthread_mutex_unlock(&mMutex);
996 return rc;
997}
998
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised.
 *              Each requested stream size (after compensating for 90/270
 *              degree rotation) is validated against the capability table
 *              that matches its pixel format.
 *
 * PARAMETERS :
 *   @streamList : streams to be configured
 *
 * RETURN     : NO_ERROR on success, -EINVAL if any stream has an
 *              unsupported size or multiple input streams are requested
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    // Only one input stream is allowed per configuration; reject duplicates.
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270 degree rotation the buffer dimensions are swapped
        // relative to the advertised (sensor-oriented) sizes.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
            (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams: validate against the supported raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG streams: validate against the picture size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL / bidirectional / input streams may be exactly the active
            // array size; otherwise fall through to the picture size table.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1121
1122/*==============================================================================
1123 * FUNCTION : isSupportChannelNeeded
1124 *
1125 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1126 *
1127 * PARAMETERS :
1128 * @stream_list : streams to be configured
1129 * @stream_config_info : the config info for streams to be configured
1130 *
1131 * RETURN : Boolen true/false decision
1132 *
1133 *==========================================================================*/
1134bool QCamera3HardwareInterface::isSupportChannelNeeded(
1135 camera3_stream_configuration_t *streamList,
1136 cam_stream_size_info_t stream_config_info)
1137{
1138 uint32_t i;
1139 bool pprocRequested = false;
1140 /* Check for conditions where PProc pipeline does not have any streams*/
1141 for (i = 0; i < stream_config_info.num_streams; i++) {
1142 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1143 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1144 pprocRequested = true;
1145 break;
1146 }
1147 }
1148
1149 if (pprocRequested == false )
1150 return true;
1151
1152 /* Dummy stream needed if only raw or jpeg streams present */
1153 for (i = 0; i < streamList->num_streams; i++) {
1154 switch(streamList->streams[i]->format) {
1155 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1156 case HAL_PIXEL_FORMAT_RAW10:
1157 case HAL_PIXEL_FORMAT_RAW16:
1158 case HAL_PIXEL_FORMAT_BLOB:
1159 break;
1160 default:
1161 return false;
1162 }
1163 }
1164 return true;
1165}
1166
/*==============================================================================
 * FUNCTION   : getSensorOutputSize
 *
 * DESCRIPTION: Get sensor output size based on current stream configuration.
 *              Sends the maximum requested stream dimension to the backend as
 *              CAM_INTF_PARM_MAX_DIMENSION, then reads back the resulting
 *              sensor (RAW) dimension via CAM_INTF_PARM_RAW_DIMENSION.
 *
 * PARAMETERS :
 *   @sensor_dim : sensor output dimension (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
{
    int32_t rc = NO_ERROR;

    // Compute the max width and max height independently over all configured
    // streams (the result may not match any single stream's aspect ratio).
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    // Reuse mParameters as a scratch batch buffer; clear before each use.
    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Query the backend for the sensor dimension it selected for the above
    // max dimension; the answer is delivered through the same batch buffer.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
    LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);

    return rc;
}
1222
/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase. The candidate features are read from
 *              the "persist.camera.hal3.feature" property (hex "0x..." or
 *              decimal), then selectively OR-ed into
 *              mStreamConfigInfo.postprocess_mask[stream_idx].
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept either "0x<hex>" (needs at least 3 chars) or plain decimal.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparsable property: treat as "no extra features" and bail out.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes precedence over LLVD when both bits are requested.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor advertises it,
        // independent of the property-driven mask above.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1290
/*==============================================================================
 * FUNCTION   : updateFpsInPreviewBuffer
 *
 * DESCRIPTION: update FPS information in preview buffer. Scans every pending
 *              buffer of the given frame number and, for preview streams,
 *              writes the max fps from the metadata into the gralloc
 *              private handle (consumed by display for refresh-rate hints).
 *
 * PARAMETERS :
 *   @metadata : pointer to metadata buffer
 *   @frame_number: frame_number to look for in pending buffer list
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            // Only buffers of the requested frame that belong to a channel
            // carrying a preview stream are updated.
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                // Pull the fps range out of the metadata, if present.
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                    CAM_INTF_PARM_FPS_RANGE, metadata) {
                    typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                        (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
1329
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001330/*==============================================================================
1331 * FUNCTION : updateTimeStampInPendingBuffers
1332 *
1333 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1334 * of a frame number
1335 *
1336 * PARAMETERS :
1337 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1338 * @timestamp : timestamp to be set
1339 *
1340 * RETURN : None
1341 *
1342 *==========================================================================*/
1343void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1344 uint32_t frameNumber, nsecs_t timestamp)
1345{
1346 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1347 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1348 if (req->frame_number != frameNumber)
1349 continue;
1350
1351 for (auto k = req->mPendingBufferList.begin();
1352 k != req->mPendingBufferList.end(); k++ ) {
1353 struct private_handle_t *priv_handle =
1354 (struct private_handle_t *) (*(k->buffer));
1355 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1356 }
1357 }
1358 return;
1359}
1360
Thierry Strudel3d639192016-09-09 11:52:26 -07001361/*===========================================================================
1362 * FUNCTION : configureStreams
1363 *
1364 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1365 * and output streams.
1366 *
1367 * PARAMETERS :
1368 * @stream_list : streams to be configured
1369 *
1370 * RETURN :
1371 *
1372 *==========================================================================*/
1373int QCamera3HardwareInterface::configureStreams(
1374 camera3_stream_configuration_t *streamList)
1375{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001376 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001377 int rc = 0;
1378
1379 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001380 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001381 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001382 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001383
1384 return rc;
1385}
1386
1387/*===========================================================================
1388 * FUNCTION : configureStreamsPerfLocked
1389 *
1390 * DESCRIPTION: configureStreams while perfLock is held.
1391 *
1392 * PARAMETERS :
1393 * @stream_list : streams to be configured
1394 *
1395 * RETURN : int32_t type of status
1396 * NO_ERROR -- success
 *              non-zero failure code
1398 *==========================================================================*/
1399int QCamera3HardwareInterface::configureStreamsPerfLocked(
1400 camera3_stream_configuration_t *streamList)
1401{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001402 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001403 int rc = 0;
1404
1405 // Sanity check stream_list
1406 if (streamList == NULL) {
1407 LOGE("NULL stream configuration");
1408 return BAD_VALUE;
1409 }
1410 if (streamList->streams == NULL) {
1411 LOGE("NULL stream list");
1412 return BAD_VALUE;
1413 }
1414
1415 if (streamList->num_streams < 1) {
1416 LOGE("Bad number of streams requested: %d",
1417 streamList->num_streams);
1418 return BAD_VALUE;
1419 }
1420
1421 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1422 LOGE("Maximum number of streams %d exceeded: %d",
1423 MAX_NUM_STREAMS, streamList->num_streams);
1424 return BAD_VALUE;
1425 }
1426
1427 mOpMode = streamList->operation_mode;
1428 LOGD("mOpMode: %d", mOpMode);
1429
1430 /* first invalidate all the steams in the mStreamList
1431 * if they appear again, they will be validated */
1432 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1433 it != mStreamInfo.end(); it++) {
1434 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1435 if (channel) {
1436 channel->stop();
1437 }
1438 (*it)->status = INVALID;
1439 }
1440
1441 if (mRawDumpChannel) {
1442 mRawDumpChannel->stop();
1443 delete mRawDumpChannel;
1444 mRawDumpChannel = NULL;
1445 }
1446
1447 if (mSupportChannel)
1448 mSupportChannel->stop();
1449
1450 if (mAnalysisChannel) {
1451 mAnalysisChannel->stop();
1452 }
1453 if (mMetadataChannel) {
1454 /* If content of mStreamInfo is not 0, there is metadata stream */
1455 mMetadataChannel->stop();
1456 }
1457 if (mChannelHandle) {
1458 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1459 mChannelHandle);
1460 LOGD("stopping channel %d", mChannelHandle);
1461 }
1462
1463 pthread_mutex_lock(&mMutex);
1464
1465 // Check state
1466 switch (mState) {
1467 case INITIALIZED:
1468 case CONFIGURED:
1469 case STARTED:
1470 /* valid state */
1471 break;
1472 default:
1473 LOGE("Invalid state %d", mState);
1474 pthread_mutex_unlock(&mMutex);
1475 return -ENODEV;
1476 }
1477
1478 /* Check whether we have video stream */
1479 m_bIs4KVideo = false;
1480 m_bIsVideo = false;
1481 m_bEisSupportedSize = false;
1482 m_bTnrEnabled = false;
1483 bool isZsl = false;
1484 uint32_t videoWidth = 0U;
1485 uint32_t videoHeight = 0U;
1486 size_t rawStreamCnt = 0;
1487 size_t stallStreamCnt = 0;
1488 size_t processedStreamCnt = 0;
1489 // Number of streams on ISP encoder path
1490 size_t numStreamsOnEncoder = 0;
1491 size_t numYuv888OnEncoder = 0;
1492 bool bYuv888OverrideJpeg = false;
1493 cam_dimension_t largeYuv888Size = {0, 0};
1494 cam_dimension_t maxViewfinderSize = {0, 0};
1495 bool bJpegExceeds4K = false;
1496 bool bJpegOnEncoder = false;
1497 bool bUseCommonFeatureMask = false;
1498 cam_feature_mask_t commonFeatureMask = 0;
1499 bool bSmallJpegSize = false;
1500 uint32_t width_ratio;
1501 uint32_t height_ratio;
1502 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1503 camera3_stream_t *inputStream = NULL;
1504 bool isJpeg = false;
1505 cam_dimension_t jpegSize = {0, 0};
1506
1507 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1508
1509 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001510 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001511 uint8_t eis_prop_set;
1512 uint32_t maxEisWidth = 0;
1513 uint32_t maxEisHeight = 0;
1514
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001515 // Initialize all instant AEC related variables
1516 mInstantAEC = false;
1517 mResetInstantAEC = false;
1518 mInstantAECSettledFrameNumber = 0;
1519 mAecSkipDisplayFrameBound = 0;
1520 mInstantAecFrameIdxCount = 0;
1521
Thierry Strudel3d639192016-09-09 11:52:26 -07001522 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1523
1524 size_t count = IS_TYPE_MAX;
1525 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1526 for (size_t i = 0; i < count; i++) {
1527 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001528 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1529 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001530 break;
1531 }
1532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001533 count = CAM_OPT_STAB_MAX;
1534 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1535 for (size_t i = 0; i < count; i++) {
1536 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1537 oisSupported = true;
1538 break;
1539 }
1540 }
1541
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001542 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001543 maxEisWidth = MAX_EIS_WIDTH;
1544 maxEisHeight = MAX_EIS_HEIGHT;
1545 }
1546
1547 /* EIS setprop control */
1548 char eis_prop[PROPERTY_VALUE_MAX];
1549 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001550 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001551 eis_prop_set = (uint8_t)atoi(eis_prop);
1552
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001553 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001554 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1555
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001556 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1557 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1558
Thierry Strudel3d639192016-09-09 11:52:26 -07001559 /* stream configurations */
1560 for (size_t i = 0; i < streamList->num_streams; i++) {
1561 camera3_stream_t *newStream = streamList->streams[i];
1562 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1563 "height = %d, rotation = %d, usage = 0x%x",
1564 i, newStream->stream_type, newStream->format,
1565 newStream->width, newStream->height, newStream->rotation,
1566 newStream->usage);
1567 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1568 newStream->stream_type == CAMERA3_STREAM_INPUT){
1569 isZsl = true;
1570 }
1571 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1572 inputStream = newStream;
1573 }
1574
1575 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1576 isJpeg = true;
1577 jpegSize.width = newStream->width;
1578 jpegSize.height = newStream->height;
1579 if (newStream->width > VIDEO_4K_WIDTH ||
1580 newStream->height > VIDEO_4K_HEIGHT)
1581 bJpegExceeds4K = true;
1582 }
1583
1584 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1585 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1586 m_bIsVideo = true;
1587 videoWidth = newStream->width;
1588 videoHeight = newStream->height;
1589 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1590 (VIDEO_4K_HEIGHT <= newStream->height)) {
1591 m_bIs4KVideo = true;
1592 }
1593 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1594 (newStream->height <= maxEisHeight);
1595 }
1596 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1597 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1598 switch (newStream->format) {
1599 case HAL_PIXEL_FORMAT_BLOB:
1600 stallStreamCnt++;
1601 if (isOnEncoder(maxViewfinderSize, newStream->width,
1602 newStream->height)) {
1603 numStreamsOnEncoder++;
1604 bJpegOnEncoder = true;
1605 }
1606 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1607 newStream->width);
1608 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1609 newStream->height);;
1610 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1611 "FATAL: max_downscale_factor cannot be zero and so assert");
1612 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1613 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1614 LOGH("Setting small jpeg size flag to true");
1615 bSmallJpegSize = true;
1616 }
1617 break;
1618 case HAL_PIXEL_FORMAT_RAW10:
1619 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1620 case HAL_PIXEL_FORMAT_RAW16:
1621 rawStreamCnt++;
1622 break;
1623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1624 processedStreamCnt++;
1625 if (isOnEncoder(maxViewfinderSize, newStream->width,
1626 newStream->height)) {
1627 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1628 !IS_USAGE_ZSL(newStream->usage)) {
1629 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1630 }
1631 numStreamsOnEncoder++;
1632 }
1633 break;
1634 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1635 processedStreamCnt++;
1636 if (isOnEncoder(maxViewfinderSize, newStream->width,
1637 newStream->height)) {
1638 // If Yuv888 size is not greater than 4K, set feature mask
1639 // to SUPERSET so that it support concurrent request on
1640 // YUV and JPEG.
1641 if (newStream->width <= VIDEO_4K_WIDTH &&
1642 newStream->height <= VIDEO_4K_HEIGHT) {
1643 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1644 }
1645 numStreamsOnEncoder++;
1646 numYuv888OnEncoder++;
1647 largeYuv888Size.width = newStream->width;
1648 largeYuv888Size.height = newStream->height;
1649 }
1650 break;
1651 default:
1652 processedStreamCnt++;
1653 if (isOnEncoder(maxViewfinderSize, newStream->width,
1654 newStream->height)) {
1655 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1656 numStreamsOnEncoder++;
1657 }
1658 break;
1659 }
1660
1661 }
1662 }
1663
1664 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1665 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1666 !m_bIsVideo) {
1667 m_bEisEnable = false;
1668 }
1669
1670 /* Logic to enable/disable TNR based on specific config size/etc.*/
1671 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1672 ((videoWidth == 1920 && videoHeight == 1080) ||
1673 (videoWidth == 1280 && videoHeight == 720)) &&
1674 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1675 m_bTnrEnabled = true;
1676
1677 /* Check if num_streams is sane */
1678 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1679 rawStreamCnt > MAX_RAW_STREAMS ||
1680 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1681 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1682 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1683 pthread_mutex_unlock(&mMutex);
1684 return -EINVAL;
1685 }
1686 /* Check whether we have zsl stream or 4k video case */
1687 if (isZsl && m_bIsVideo) {
1688 LOGE("Currently invalid configuration ZSL&Video!");
1689 pthread_mutex_unlock(&mMutex);
1690 return -EINVAL;
1691 }
1692 /* Check if stream sizes are sane */
1693 if (numStreamsOnEncoder > 2) {
1694 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1695 pthread_mutex_unlock(&mMutex);
1696 return -EINVAL;
1697 } else if (1 < numStreamsOnEncoder){
1698 bUseCommonFeatureMask = true;
1699 LOGH("Multiple streams above max viewfinder size, common mask needed");
1700 }
1701
1702 /* Check if BLOB size is greater than 4k in 4k recording case */
1703 if (m_bIs4KVideo && bJpegExceeds4K) {
1704 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1705 pthread_mutex_unlock(&mMutex);
1706 return -EINVAL;
1707 }
1708
1709 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1710 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1711 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1712 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1713 // configurations:
1714 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1715 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1716 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1717 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1718 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1719 __func__);
1720 pthread_mutex_unlock(&mMutex);
1721 return -EINVAL;
1722 }
1723
1724 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1725 // the YUV stream's size is greater or equal to the JPEG size, set common
1726 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1727 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1728 jpegSize.width, jpegSize.height) &&
1729 largeYuv888Size.width > jpegSize.width &&
1730 largeYuv888Size.height > jpegSize.height) {
1731 bYuv888OverrideJpeg = true;
1732 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1733 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1734 }
1735
1736 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1737 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1738 commonFeatureMask);
1739 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1740 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1741
1742 rc = validateStreamDimensions(streamList);
1743 if (rc == NO_ERROR) {
1744 rc = validateStreamRotations(streamList);
1745 }
1746 if (rc != NO_ERROR) {
1747 LOGE("Invalid stream configuration requested!");
1748 pthread_mutex_unlock(&mMutex);
1749 return rc;
1750 }
1751
1752 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1753 for (size_t i = 0; i < streamList->num_streams; i++) {
1754 camera3_stream_t *newStream = streamList->streams[i];
1755 LOGH("newStream type = %d, stream format = %d "
1756 "stream size : %d x %d, stream rotation = %d",
1757 newStream->stream_type, newStream->format,
1758 newStream->width, newStream->height, newStream->rotation);
1759 //if the stream is in the mStreamList validate it
1760 bool stream_exists = false;
1761 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1762 it != mStreamInfo.end(); it++) {
1763 if ((*it)->stream == newStream) {
1764 QCamera3ProcessingChannel *channel =
1765 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1766 stream_exists = true;
1767 if (channel)
1768 delete channel;
1769 (*it)->status = VALID;
1770 (*it)->stream->priv = NULL;
1771 (*it)->channel = NULL;
1772 }
1773 }
1774 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1775 //new stream
1776 stream_info_t* stream_info;
1777 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1778 if (!stream_info) {
1779 LOGE("Could not allocate stream info");
1780 rc = -ENOMEM;
1781 pthread_mutex_unlock(&mMutex);
1782 return rc;
1783 }
1784 stream_info->stream = newStream;
1785 stream_info->status = VALID;
1786 stream_info->channel = NULL;
1787 mStreamInfo.push_back(stream_info);
1788 }
1789 /* Covers Opaque ZSL and API1 F/W ZSL */
1790 if (IS_USAGE_ZSL(newStream->usage)
1791 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1792 if (zslStream != NULL) {
1793 LOGE("Multiple input/reprocess streams requested!");
1794 pthread_mutex_unlock(&mMutex);
1795 return BAD_VALUE;
1796 }
1797 zslStream = newStream;
1798 }
1799 /* Covers YUV reprocess */
1800 if (inputStream != NULL) {
1801 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1802 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1803 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1804 && inputStream->width == newStream->width
1805 && inputStream->height == newStream->height) {
1806 if (zslStream != NULL) {
1807 /* This scenario indicates multiple YUV streams with same size
1808 * as input stream have been requested, since zsl stream handle
1809 * is solely use for the purpose of overriding the size of streams
1810 * which share h/w streams we will just make a guess here as to
1811 * which of the stream is a ZSL stream, this will be refactored
1812 * once we make generic logic for streams sharing encoder output
1813 */
1814 LOGH("Warning, Multiple ip/reprocess streams requested!");
1815 }
1816 zslStream = newStream;
1817 }
1818 }
1819 }
1820
1821 /* If a zsl stream is set, we know that we have configured at least one input or
1822 bidirectional stream */
1823 if (NULL != zslStream) {
1824 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1825 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1826 mInputStreamInfo.format = zslStream->format;
1827 mInputStreamInfo.usage = zslStream->usage;
1828 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1829 mInputStreamInfo.dim.width,
1830 mInputStreamInfo.dim.height,
1831 mInputStreamInfo.format, mInputStreamInfo.usage);
1832 }
1833
1834 cleanAndSortStreamInfo();
1835 if (mMetadataChannel) {
1836 delete mMetadataChannel;
1837 mMetadataChannel = NULL;
1838 }
1839 if (mSupportChannel) {
1840 delete mSupportChannel;
1841 mSupportChannel = NULL;
1842 }
1843
1844 if (mAnalysisChannel) {
1845 delete mAnalysisChannel;
1846 mAnalysisChannel = NULL;
1847 }
1848
1849 if (mDummyBatchChannel) {
1850 delete mDummyBatchChannel;
1851 mDummyBatchChannel = NULL;
1852 }
1853
1854 //Create metadata channel and initialize it
1855 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1856 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1857 gCamCapability[mCameraId]->color_arrangement);
1858 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1859 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001860 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 if (mMetadataChannel == NULL) {
1862 LOGE("failed to allocate metadata channel");
1863 rc = -ENOMEM;
1864 pthread_mutex_unlock(&mMutex);
1865 return rc;
1866 }
1867 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1868 if (rc < 0) {
1869 LOGE("metadata channel initialization failed");
1870 delete mMetadataChannel;
1871 mMetadataChannel = NULL;
1872 pthread_mutex_unlock(&mMutex);
1873 return rc;
1874 }
1875
1876 // Create analysis stream all the time, even when h/w support is not available
1877 {
1878 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1879 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1880 gCamCapability[mCameraId]->color_arrangement);
1881 cam_analysis_info_t analysisInfo;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001882 int32_t ret = NO_ERROR;
1883 ret = mCommon.getAnalysisInfo(
Thierry Strudel3d639192016-09-09 11:52:26 -07001884 FALSE,
1885 TRUE,
1886 analysisFeatureMask,
1887 &analysisInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001888 if (ret == NO_ERROR) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001889 mAnalysisChannel = new QCamera3SupportChannel(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001890 mCameraHandle->camera_handle,
1891 mChannelHandle,
1892 mCameraHandle->ops,
1893 &analysisInfo.analysis_padding_info,
1894 analysisFeatureMask,
1895 CAM_STREAM_TYPE_ANALYSIS,
1896 &analysisInfo.analysis_max_res,
1897 (analysisInfo.analysis_format
1898 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1899 : CAM_FORMAT_YUV_420_NV21),
1900 analysisInfo.hw_analysis_supported,
1901 gCamCapability[mCameraId]->color_arrangement,
1902 this,
1903 0); // force buffer count to 0
1904 } else {
1905 LOGW("getAnalysisInfo failed, ret = %d", ret);
1906 }
1907 if (!mAnalysisChannel) {
1908 LOGW("Analysis channel cannot be created");
Thierry Strudel3d639192016-09-09 11:52:26 -07001909 }
1910 }
1911
1912 bool isRawStreamRequested = false;
1913 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1914 /* Allocate channel objects for the requested streams */
1915 for (size_t i = 0; i < streamList->num_streams; i++) {
1916 camera3_stream_t *newStream = streamList->streams[i];
1917 uint32_t stream_usage = newStream->usage;
1918 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1919 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1920 struct camera_info *p_info = NULL;
1921 pthread_mutex_lock(&gCamLock);
1922 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1923 pthread_mutex_unlock(&gCamLock);
1924 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1925 || IS_USAGE_ZSL(newStream->usage)) &&
1926 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1927 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1928 if (bUseCommonFeatureMask) {
1929 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1930 commonFeatureMask;
1931 } else {
1932 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1933 CAM_QCOM_FEATURE_NONE;
1934 }
1935
1936 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1937 LOGH("Input stream configured, reprocess config");
1938 } else {
1939 //for non zsl streams find out the format
1940 switch (newStream->format) {
1941 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1942 {
1943 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1944 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1945 /* add additional features to pp feature mask */
1946 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1947 mStreamConfigInfo.num_streams);
1948
1949 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1951 CAM_STREAM_TYPE_VIDEO;
1952 if (m_bTnrEnabled && m_bTnrVideo) {
1953 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1954 CAM_QCOM_FEATURE_CPP_TNR;
1955 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1956 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1957 ~CAM_QCOM_FEATURE_CDS;
1958 }
1959 } else {
1960 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1961 CAM_STREAM_TYPE_PREVIEW;
1962 if (m_bTnrEnabled && m_bTnrPreview) {
1963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1964 CAM_QCOM_FEATURE_CPP_TNR;
1965 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1966 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1967 ~CAM_QCOM_FEATURE_CDS;
1968 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001969 if(!m_bSwTnrPreview) {
1970 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1971 ~CAM_QTI_FEATURE_SW_TNR;
1972 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001973 padding_info.width_padding = mSurfaceStridePadding;
1974 padding_info.height_padding = CAM_PAD_TO_2;
1975 }
1976 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1977 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1978 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1979 newStream->height;
1980 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1981 newStream->width;
1982 }
1983 }
1984 break;
1985 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1986 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1987 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1988 if (bUseCommonFeatureMask)
1989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1990 commonFeatureMask;
1991 else
1992 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1993 CAM_QCOM_FEATURE_NONE;
1994 } else {
1995 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1996 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1997 }
1998 break;
1999 case HAL_PIXEL_FORMAT_BLOB:
2000 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2001 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2002 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2003 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2004 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2005 } else {
2006 if (bUseCommonFeatureMask &&
2007 isOnEncoder(maxViewfinderSize, newStream->width,
2008 newStream->height)) {
2009 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2010 } else {
2011 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2012 }
2013 }
2014 if (isZsl) {
2015 if (zslStream) {
2016 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2017 (int32_t)zslStream->width;
2018 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2019 (int32_t)zslStream->height;
2020 } else {
2021 LOGE("Error, No ZSL stream identified");
2022 pthread_mutex_unlock(&mMutex);
2023 return -EINVAL;
2024 }
2025 } else if (m_bIs4KVideo) {
2026 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2027 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2028 } else if (bYuv888OverrideJpeg) {
2029 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2030 (int32_t)largeYuv888Size.width;
2031 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2032 (int32_t)largeYuv888Size.height;
2033 }
2034 break;
2035 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2036 case HAL_PIXEL_FORMAT_RAW16:
2037 case HAL_PIXEL_FORMAT_RAW10:
2038 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2040 isRawStreamRequested = true;
2041 break;
2042 default:
2043 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2044 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2045 break;
2046 }
2047 }
2048
2049 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2050 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2051 gCamCapability[mCameraId]->color_arrangement);
2052
2053 if (newStream->priv == NULL) {
2054 //New stream, construct channel
2055 switch (newStream->stream_type) {
2056 case CAMERA3_STREAM_INPUT:
2057 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2058 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2059 break;
2060 case CAMERA3_STREAM_BIDIRECTIONAL:
2061 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2062 GRALLOC_USAGE_HW_CAMERA_WRITE;
2063 break;
2064 case CAMERA3_STREAM_OUTPUT:
2065 /* For video encoding stream, set read/write rarely
2066 * flag so that they may be set to un-cached */
2067 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2068 newStream->usage |=
2069 (GRALLOC_USAGE_SW_READ_RARELY |
2070 GRALLOC_USAGE_SW_WRITE_RARELY |
2071 GRALLOC_USAGE_HW_CAMERA_WRITE);
2072 else if (IS_USAGE_ZSL(newStream->usage))
2073 {
2074 LOGD("ZSL usage flag skipping");
2075 }
2076 else if (newStream == zslStream
2077 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2078 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2079 } else
2080 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2081 break;
2082 default:
2083 LOGE("Invalid stream_type %d", newStream->stream_type);
2084 break;
2085 }
2086
2087 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2088 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2089 QCamera3ProcessingChannel *channel = NULL;
2090 switch (newStream->format) {
2091 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2092 if ((newStream->usage &
2093 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2094 (streamList->operation_mode ==
2095 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2096 ) {
2097 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2098 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002099 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002100 this,
2101 newStream,
2102 (cam_stream_type_t)
2103 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2104 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2105 mMetadataChannel,
2106 0); //heap buffers are not required for HFR video channel
2107 if (channel == NULL) {
2108 LOGE("allocation of channel failed");
2109 pthread_mutex_unlock(&mMutex);
2110 return -ENOMEM;
2111 }
2112 //channel->getNumBuffers() will return 0 here so use
2113 //MAX_INFLIGH_HFR_REQUESTS
2114 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2115 newStream->priv = channel;
2116 LOGI("num video buffers in HFR mode: %d",
2117 MAX_INFLIGHT_HFR_REQUESTS);
2118 } else {
2119 /* Copy stream contents in HFR preview only case to create
2120 * dummy batch channel so that sensor streaming is in
2121 * HFR mode */
2122 if (!m_bIsVideo && (streamList->operation_mode ==
2123 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2124 mDummyBatchStream = *newStream;
2125 }
2126 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2127 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002128 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002129 this,
2130 newStream,
2131 (cam_stream_type_t)
2132 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2133 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2134 mMetadataChannel,
2135 MAX_INFLIGHT_REQUESTS);
2136 if (channel == NULL) {
2137 LOGE("allocation of channel failed");
2138 pthread_mutex_unlock(&mMutex);
2139 return -ENOMEM;
2140 }
2141 newStream->max_buffers = channel->getNumBuffers();
2142 newStream->priv = channel;
2143 }
2144 break;
2145 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2146 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2147 mChannelHandle,
2148 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002149 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002150 this,
2151 newStream,
2152 (cam_stream_type_t)
2153 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2154 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2155 mMetadataChannel);
2156 if (channel == NULL) {
2157 LOGE("allocation of YUV channel failed");
2158 pthread_mutex_unlock(&mMutex);
2159 return -ENOMEM;
2160 }
2161 newStream->max_buffers = channel->getNumBuffers();
2162 newStream->priv = channel;
2163 break;
2164 }
2165 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2166 case HAL_PIXEL_FORMAT_RAW16:
2167 case HAL_PIXEL_FORMAT_RAW10:
2168 mRawChannel = new QCamera3RawChannel(
2169 mCameraHandle->camera_handle, mChannelHandle,
2170 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002171 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002172 this, newStream,
2173 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2174 mMetadataChannel,
2175 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2176 if (mRawChannel == NULL) {
2177 LOGE("allocation of raw channel failed");
2178 pthread_mutex_unlock(&mMutex);
2179 return -ENOMEM;
2180 }
2181 newStream->max_buffers = mRawChannel->getNumBuffers();
2182 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2183 break;
2184 case HAL_PIXEL_FORMAT_BLOB:
2185 // Max live snapshot inflight buffer is 1. This is to mitigate
2186 // frame drop issues for video snapshot. The more buffers being
2187 // allocated, the more frame drops there are.
2188 mPictureChannel = new QCamera3PicChannel(
2189 mCameraHandle->camera_handle, mChannelHandle,
2190 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002191 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002192 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2193 m_bIs4KVideo, isZsl, mMetadataChannel,
2194 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2195 if (mPictureChannel == NULL) {
2196 LOGE("allocation of channel failed");
2197 pthread_mutex_unlock(&mMutex);
2198 return -ENOMEM;
2199 }
2200 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2201 newStream->max_buffers = mPictureChannel->getNumBuffers();
2202 mPictureChannel->overrideYuvSize(
2203 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2204 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2205 break;
2206
2207 default:
2208 LOGE("not a supported format 0x%x", newStream->format);
2209 break;
2210 }
2211 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2212 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2213 } else {
2214 LOGE("Error, Unknown stream type");
2215 pthread_mutex_unlock(&mMutex);
2216 return -EINVAL;
2217 }
2218
2219 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2220 if (channel != NULL && channel->isUBWCEnabled()) {
2221 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002222 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2223 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002224 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2225 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2226 }
2227 }
2228
2229 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2230 it != mStreamInfo.end(); it++) {
2231 if ((*it)->stream == newStream) {
2232 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2233 break;
2234 }
2235 }
2236 } else {
2237 // Channel already exists for this stream
2238 // Do nothing for now
2239 }
2240 padding_info = gCamCapability[mCameraId]->padding_info;
2241
2242 /* Do not add entries for input stream in metastream info
2243 * since there is no real stream associated with it
2244 */
2245 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2246 mStreamConfigInfo.num_streams++;
2247 }
2248
2249 //RAW DUMP channel
2250 if (mEnableRawDump && isRawStreamRequested == false){
2251 cam_dimension_t rawDumpSize;
2252 rawDumpSize = getMaxRawSize(mCameraId);
2253 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2254 setPAAFSupport(rawDumpFeatureMask,
2255 CAM_STREAM_TYPE_RAW,
2256 gCamCapability[mCameraId]->color_arrangement);
2257 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2258 mChannelHandle,
2259 mCameraHandle->ops,
2260 rawDumpSize,
2261 &padding_info,
2262 this, rawDumpFeatureMask);
2263 if (!mRawDumpChannel) {
2264 LOGE("Raw Dump channel cannot be created");
2265 pthread_mutex_unlock(&mMutex);
2266 return -ENOMEM;
2267 }
2268 }
2269
2270
2271 if (mAnalysisChannel) {
2272 cam_analysis_info_t analysisInfo;
2273 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2274 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2275 CAM_STREAM_TYPE_ANALYSIS;
2276 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2277 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2278 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2279 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2280 gCamCapability[mCameraId]->color_arrangement);
2281 rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2282 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2283 &analysisInfo);
2284 if (rc != NO_ERROR) {
2285 LOGE("getAnalysisInfo failed, ret = %d", rc);
2286 pthread_mutex_unlock(&mMutex);
2287 return rc;
2288 }
2289 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2290 analysisInfo.analysis_max_res;
2291 mStreamConfigInfo.num_streams++;
2292 }
2293
2294 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2295 cam_analysis_info_t supportInfo;
2296 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2297 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2298 setPAAFSupport(callbackFeatureMask,
2299 CAM_STREAM_TYPE_CALLBACK,
2300 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002301 int32_t ret = NO_ERROR;
2302 ret = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2303 if (ret != NO_ERROR) {
2304 /* Ignore the error for Mono camera
2305 * because the PAAF bit mask is only set
2306 * for CAM_STREAM_TYPE_ANALYSIS stream type
2307 */
2308 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2309 LOGW("getAnalysisInfo failed, ret = %d", ret);
2310 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 }
2312 mSupportChannel = new QCamera3SupportChannel(
2313 mCameraHandle->camera_handle,
2314 mChannelHandle,
2315 mCameraHandle->ops,
2316 &gCamCapability[mCameraId]->padding_info,
2317 callbackFeatureMask,
2318 CAM_STREAM_TYPE_CALLBACK,
2319 &QCamera3SupportChannel::kDim,
2320 CAM_FORMAT_YUV_420_NV21,
2321 supportInfo.hw_analysis_supported,
2322 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002323 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002324 if (!mSupportChannel) {
2325 LOGE("dummy channel cannot be created");
2326 pthread_mutex_unlock(&mMutex);
2327 return -ENOMEM;
2328 }
2329 }
2330
2331 if (mSupportChannel) {
2332 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2333 QCamera3SupportChannel::kDim;
2334 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2335 CAM_STREAM_TYPE_CALLBACK;
2336 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2337 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2338 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2339 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mStreamConfigInfo.num_streams++;
2342 }
2343
2344 if (mRawDumpChannel) {
2345 cam_dimension_t rawSize;
2346 rawSize = getMaxRawSize(mCameraId);
2347 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2348 rawSize;
2349 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2350 CAM_STREAM_TYPE_RAW;
2351 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2352 CAM_QCOM_FEATURE_NONE;
2353 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2354 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2355 gCamCapability[mCameraId]->color_arrangement);
2356 mStreamConfigInfo.num_streams++;
2357 }
2358 /* In HFR mode, if video stream is not added, create a dummy channel so that
2359 * ISP can create a batch mode even for preview only case. This channel is
2360 * never 'start'ed (no stream-on), it is only 'initialized' */
2361 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2362 !m_bIsVideo) {
2363 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2364 setPAAFSupport(dummyFeatureMask,
2365 CAM_STREAM_TYPE_VIDEO,
2366 gCamCapability[mCameraId]->color_arrangement);
2367 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2368 mChannelHandle,
2369 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002370 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002371 this,
2372 &mDummyBatchStream,
2373 CAM_STREAM_TYPE_VIDEO,
2374 dummyFeatureMask,
2375 mMetadataChannel);
2376 if (NULL == mDummyBatchChannel) {
2377 LOGE("creation of mDummyBatchChannel failed."
2378 "Preview will use non-hfr sensor mode ");
2379 }
2380 }
2381 if (mDummyBatchChannel) {
2382 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2383 mDummyBatchStream.width;
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2385 mDummyBatchStream.height;
2386 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2387 CAM_STREAM_TYPE_VIDEO;
2388 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2389 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2390 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2391 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2392 gCamCapability[mCameraId]->color_arrangement);
2393 mStreamConfigInfo.num_streams++;
2394 }
2395
2396 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2397 mStreamConfigInfo.buffer_info.max_buffers =
2398 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2399
2400 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2401 for (pendingRequestIterator i = mPendingRequestsList.begin();
2402 i != mPendingRequestsList.end();) {
2403 i = erasePendingRequest(i);
2404 }
2405 mPendingFrameDropList.clear();
2406 // Initialize/Reset the pending buffers list
2407 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2408 req.mPendingBufferList.clear();
2409 }
2410 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2411
2412 mPendingReprocessResultList.clear();
2413
2414 mCurJpegMeta.clear();
2415 //Get min frame duration for this streams configuration
2416 deriveMinFrameDuration();
2417
2418 // Update state
2419 mState = CONFIGURED;
2420
2421 pthread_mutex_unlock(&mMutex);
2422
2423 return rc;
2424}
2425
2426/*===========================================================================
2427 * FUNCTION : validateCaptureRequest
2428 *
2429 * DESCRIPTION: validate a capture request from camera service
2430 *
2431 * PARAMETERS :
2432 * @request : request from framework to process
2433 *
2434 * RETURN :
2435 *
2436 *==========================================================================*/
2437int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002438 camera3_capture_request_t *request,
2439 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002440{
2441 ssize_t idx = 0;
2442 const camera3_stream_buffer_t *b;
2443 CameraMetadata meta;
2444
2445 /* Sanity check the request */
2446 if (request == NULL) {
2447 LOGE("NULL capture request");
2448 return BAD_VALUE;
2449 }
2450
2451 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2452 /*settings cannot be null for the first request*/
2453 return BAD_VALUE;
2454 }
2455
2456 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002457 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2458 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002459 LOGE("Request %d: No output buffers provided!",
2460 __FUNCTION__, frameNumber);
2461 return BAD_VALUE;
2462 }
2463 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2464 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2465 request->num_output_buffers, MAX_NUM_STREAMS);
2466 return BAD_VALUE;
2467 }
2468 if (request->input_buffer != NULL) {
2469 b = request->input_buffer;
2470 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2471 LOGE("Request %d: Buffer %ld: Status not OK!",
2472 frameNumber, (long)idx);
2473 return BAD_VALUE;
2474 }
2475 if (b->release_fence != -1) {
2476 LOGE("Request %d: Buffer %ld: Has a release fence!",
2477 frameNumber, (long)idx);
2478 return BAD_VALUE;
2479 }
2480 if (b->buffer == NULL) {
2481 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2482 frameNumber, (long)idx);
2483 return BAD_VALUE;
2484 }
2485 }
2486
2487 // Validate all buffers
2488 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002489 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002490 QCamera3ProcessingChannel *channel =
2491 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2492 if (channel == NULL) {
2493 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2494 frameNumber, (long)idx);
2495 return BAD_VALUE;
2496 }
2497 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2498 LOGE("Request %d: Buffer %ld: Status not OK!",
2499 frameNumber, (long)idx);
2500 return BAD_VALUE;
2501 }
2502 if (b->release_fence != -1) {
2503 LOGE("Request %d: Buffer %ld: Has a release fence!",
2504 frameNumber, (long)idx);
2505 return BAD_VALUE;
2506 }
2507 if (b->buffer == NULL) {
2508 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2509 frameNumber, (long)idx);
2510 return BAD_VALUE;
2511 }
2512 if (*(b->buffer) == NULL) {
2513 LOGE("Request %d: Buffer %ld: NULL private handle!",
2514 frameNumber, (long)idx);
2515 return BAD_VALUE;
2516 }
2517 idx++;
2518 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002519 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002520 return NO_ERROR;
2521}
2522
2523/*===========================================================================
2524 * FUNCTION : deriveMinFrameDuration
2525 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2527 * on currently configured streams.
2528 *
2529 * PARAMETERS : NONE
2530 *
2531 * RETURN : NONE
2532 *
2533 *==========================================================================*/
2534void QCamera3HardwareInterface::deriveMinFrameDuration()
2535{
2536 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2537
2538 maxJpegDim = 0;
2539 maxProcessedDim = 0;
2540 maxRawDim = 0;
2541
2542 // Figure out maximum jpeg, processed, and raw dimensions
2543 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2544 it != mStreamInfo.end(); it++) {
2545
2546 // Input stream doesn't have valid stream_type
2547 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2548 continue;
2549
2550 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2551 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2552 if (dimension > maxJpegDim)
2553 maxJpegDim = dimension;
2554 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2555 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2556 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2557 if (dimension > maxRawDim)
2558 maxRawDim = dimension;
2559 } else {
2560 if (dimension > maxProcessedDim)
2561 maxProcessedDim = dimension;
2562 }
2563 }
2564
2565 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2566 MAX_SIZES_CNT);
2567
2568 //Assume all jpeg dimensions are in processed dimensions.
2569 if (maxJpegDim > maxProcessedDim)
2570 maxProcessedDim = maxJpegDim;
2571 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2572 if (maxProcessedDim > maxRawDim) {
2573 maxRawDim = INT32_MAX;
2574
2575 for (size_t i = 0; i < count; i++) {
2576 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2577 gCamCapability[mCameraId]->raw_dim[i].height;
2578 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2579 maxRawDim = dimension;
2580 }
2581 }
2582
2583 //Find minimum durations for processed, jpeg, and raw
2584 for (size_t i = 0; i < count; i++) {
2585 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2586 gCamCapability[mCameraId]->raw_dim[i].height) {
2587 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2588 break;
2589 }
2590 }
2591 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2592 for (size_t i = 0; i < count; i++) {
2593 if (maxProcessedDim ==
2594 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2595 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2596 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2597 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2598 break;
2599 }
2600 }
2601}
2602
2603/*===========================================================================
2604 * FUNCTION : getMinFrameDuration
2605 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
 *              and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN : min frame duration for a particular request
2612 *
2613 *==========================================================================*/
2614int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2615{
2616 bool hasJpegStream = false;
2617 bool hasRawStream = false;
2618 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2619 const camera3_stream_t *stream = request->output_buffers[i].stream;
2620 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2621 hasJpegStream = true;
2622 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2623 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2624 stream->format == HAL_PIXEL_FORMAT_RAW16)
2625 hasRawStream = true;
2626 }
2627
2628 if (!hasJpegStream)
2629 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2630 else
2631 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2632}
2633
2634/*===========================================================================
2635 * FUNCTION : handleBuffersDuringFlushLock
2636 *
2637 * DESCRIPTION: Account for buffers returned from back-end during flush
2638 * This function is executed while mMutex is held by the caller.
2639 *
2640 * PARAMETERS :
2641 * @buffer: image buffer for the callback
2642 *
2643 * RETURN :
2644 *==========================================================================*/
2645void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2646{
2647 bool buffer_found = false;
2648 for (List<PendingBuffersInRequest>::iterator req =
2649 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2650 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2651 for (List<PendingBufferInfo>::iterator i =
2652 req->mPendingBufferList.begin();
2653 i != req->mPendingBufferList.end(); i++) {
2654 if (i->buffer == buffer->buffer) {
2655 mPendingBuffersMap.numPendingBufsAtFlush--;
2656 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2657 buffer->buffer, req->frame_number,
2658 mPendingBuffersMap.numPendingBufsAtFlush);
2659 buffer_found = true;
2660 break;
2661 }
2662 }
2663 if (buffer_found) {
2664 break;
2665 }
2666 }
2667 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2668 //signal the flush()
2669 LOGD("All buffers returned to HAL. Continue flush");
2670 pthread_cond_signal(&mBuffersCond);
2671 }
2672}
2673
2674
2675/*===========================================================================
2676 * FUNCTION : handlePendingReprocResults
2677 *
2678 * DESCRIPTION: check and notify on any pending reprocess results
2679 *
2680 * PARAMETERS :
2681 * @frame_number : Pending request frame number
2682 *
2683 * RETURN : int32_t type of status
2684 * NO_ERROR -- success
 *              non-zero failure code
2686 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a queued reprocess result matching frame_number. If found,
    // fire its delayed notify, assemble and deliver the capture result by
    // pairing the stored output buffer with the matching pending request's
    // input buffer and settings, then drop both bookkeeping entries.
    // Always returns NO_ERROR (a miss is not an error).
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back until now.
            orchestrateNotify(&j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Build the capture result from the stored reprocess
                    // buffer (j) and the matching pending request (k).
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    orchestrateResult(&result);

                    // erasePendingRequest invalidates k; break out of the
                    // inner loop immediately.
                    erasePendingRequest(k);
                    break;
                }
            }
            // erase invalidates j; break out of the outer loop immediately.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2725
2726/*===========================================================================
2727 * FUNCTION : handleBatchMetadata
2728 *
2729 * DESCRIPTION: Handles metadata buffer callback in batch mode
2730 *
2731 * PARAMETERS : @metadata_buf: metadata buffer
2732 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2733 * the meta buf in this method
2734 *
2735 * RETURN :
2736 *
2737 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // Missing entries mean the buffer cannot be interpreted, but we still
    // fall through so handleMetadataWithLock runs once (needed for pipeline
    // depth accounting).
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the batch's first
        // frame number via mPendingBatchMap.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is fully accounted for once its regular (non-urgent)
        // frame number arrives, so the map entry can be removed now.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One loop iteration per frame in the batch; warn if either span
        // exceeds the supported HFR batch size.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer per-frame timestamps by spacing frames evenly at the
                // HFR video rate, anchored on the last frame's capture time.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // Per-frame dispatch; buf-done/free is deferred to the end, hence
        // free_and_bufdone_meta_buf = false here.
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2902
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002903void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2904 camera3_error_msg_code_t errorCode)
2905{
2906 camera3_notify_msg_t notify_msg;
2907 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2908 notify_msg.type = CAMERA3_MSG_ERROR;
2909 notify_msg.message.error.error_code = errorCode;
2910 notify_msg.message.error.error_stream = NULL;
2911 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002912 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002913
2914 return;
2915}
Thierry Strudel3d639192016-09-09 11:52:26 -07002916/*===========================================================================
2917 * FUNCTION : handleMetadataWithLock
2918 *
2919 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2920 *
2921 * PARAMETERS : @metadata_buf: metadata buffer
2922 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2923 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002924 * @firstMetadataInBatch: Boolean to indicate whether this is the
2925 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 *
2927 * RETURN :
2928 *
2929 *==========================================================================*/
2930void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002931 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2932 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002933{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002934 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002935 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2936 //during flush do not send metadata from this thread
2937 LOGD("not sending metadata during flush or when mState is error");
2938 if (free_and_bufdone_meta_buf) {
2939 mMetadataChannel->bufDone(metadata_buf);
2940 free(metadata_buf);
2941 }
2942 return;
2943 }
2944
2945 //not in flush
2946 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2947 int32_t frame_number_valid, urgent_frame_number_valid;
2948 uint32_t frame_number, urgent_frame_number;
2949 int64_t capture_time;
2950 nsecs_t currentSysTime;
2951
2952 int32_t *p_frame_number_valid =
2953 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2954 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2955 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2956 int32_t *p_urgent_frame_number_valid =
2957 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2958 uint32_t *p_urgent_frame_number =
2959 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2960 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2961 metadata) {
2962 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2963 *p_frame_number_valid, *p_frame_number);
2964 }
2965
2966 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2967 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2968 LOGE("Invalid metadata");
2969 if (free_and_bufdone_meta_buf) {
2970 mMetadataChannel->bufDone(metadata_buf);
2971 free(metadata_buf);
2972 }
2973 goto done_metadata;
2974 }
2975 frame_number_valid = *p_frame_number_valid;
2976 frame_number = *p_frame_number;
2977 capture_time = *p_capture_time;
2978 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2979 urgent_frame_number = *p_urgent_frame_number;
2980 currentSysTime = systemTime(CLOCK_MONOTONIC);
2981
2982 // Detect if buffers from any requests are overdue
2983 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2984 if ( (currentSysTime - req.timestamp) >
2985 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2986 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002987 assert(missed.stream->priv);
2988 if (missed.stream->priv) {
2989 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
2990 assert(ch->mStreams[0]);
2991 if (ch->mStreams[0]) {
2992 LOGE("Cancel missing frame = %d, buffer = %p,"
2993 "stream type = %d, stream format = %d",
2994 req.frame_number, missed.buffer,
2995 ch->mStreams[0]->getMyType(), missed.stream->format);
2996 ch->timeoutFrame(req.frame_number);
2997 }
2998 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002999 }
3000 }
3001 }
3002 //Partial result on process_capture_result for timestamp
3003 if (urgent_frame_number_valid) {
3004 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3005 urgent_frame_number, capture_time);
3006
        //Received an urgent Frame Number, handle it
3008 //using partial results
3009 for (pendingRequestIterator i =
3010 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3011 LOGD("Iterator Frame = %d urgent frame = %d",
3012 i->frame_number, urgent_frame_number);
3013
3014 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3015 (i->partial_result_cnt == 0)) {
3016 LOGE("Error: HAL missed urgent metadata for frame number %d",
3017 i->frame_number);
3018 }
3019
3020 if (i->frame_number == urgent_frame_number &&
3021 i->bUrgentReceived == 0) {
3022
3023 camera3_capture_result_t result;
3024 memset(&result, 0, sizeof(camera3_capture_result_t));
3025
3026 i->partial_result_cnt++;
3027 i->bUrgentReceived = 1;
3028 // Extract 3A metadata
3029 result.result =
3030 translateCbUrgentMetadataToResultMetadata(metadata);
3031 // Populate metadata result
3032 result.frame_number = urgent_frame_number;
3033 result.num_output_buffers = 0;
3034 result.output_buffers = NULL;
3035 result.partial_result = i->partial_result_cnt;
3036
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003037 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003038 LOGD("urgent frame_number = %u, capture_time = %lld",
3039 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003040 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3041 // Instant AEC settled for this frame.
3042 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3043 mInstantAECSettledFrameNumber = urgent_frame_number;
3044 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003045 free_camera_metadata((camera_metadata_t *)result.result);
3046 break;
3047 }
3048 }
3049 }
3050
3051 if (!frame_number_valid) {
3052 LOGD("Not a valid normal frame number, used as SOF only");
3053 if (free_and_bufdone_meta_buf) {
3054 mMetadataChannel->bufDone(metadata_buf);
3055 free(metadata_buf);
3056 }
3057 goto done_metadata;
3058 }
3059 LOGH("valid frame_number = %u, capture_time = %lld",
3060 frame_number, capture_time);
3061
3062 for (pendingRequestIterator i = mPendingRequestsList.begin();
3063 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3064 // Flush out all entries with less or equal frame numbers.
3065
3066 camera3_capture_result_t result;
3067 memset(&result, 0, sizeof(camera3_capture_result_t));
3068
3069 LOGD("frame_number in the list is %u", i->frame_number);
3070 i->partial_result_cnt++;
3071 result.partial_result = i->partial_result_cnt;
3072
3073 // Check whether any stream buffer corresponding to this is dropped or not
3074 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003075 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3076 if (p_cam_frame_drop ||
3077 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003078 /* Clear notify_msg structure */
3079 camera3_notify_msg_t notify_msg;
3080 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3081 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3082 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003083 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003084 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3085 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003086 if (p_cam_frame_drop) {
3087 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003088 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003089 // Got the stream ID for drop frame.
3090 dropFrame = true;
3091 break;
3092 }
3093 }
3094 } else {
3095 // This is instant AEC case.
                // For instant AEC drop the stream until AEC is settled.
3097 dropFrame = true;
3098 }
3099 if (dropFrame) {
3100 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3101 if (p_cam_frame_drop) {
3102 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003103 LOGE("Start of reporting error frame#=%u, streamID=%u",
3104 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003105 } else {
3106 // For instant AEC, inform frame drop and frame number
3107 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3108 "AEC settled frame number = %u",
3109 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3110 }
3111 notify_msg.type = CAMERA3_MSG_ERROR;
3112 notify_msg.message.error.frame_number = i->frame_number;
3113 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3114 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003115 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003116 if (p_cam_frame_drop) {
3117 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003118 LOGE("End of reporting error frame#=%u, streamID=%u",
3119 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003120 } else {
3121 // For instant AEC, inform frame drop and frame number
3122 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3123 "AEC settled frame number = %u",
3124 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3125 }
3126 PendingFrameDropInfo PendingFrameDrop;
3127 PendingFrameDrop.frame_number=i->frame_number;
3128 PendingFrameDrop.stream_ID = streamID;
3129 // Add the Frame drop info to mPendingFrameDropList
3130 mPendingFrameDropList.push_back(PendingFrameDrop);
3131 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003132 }
3133 }
3134
3135 // Send empty metadata with already filled buffers for dropped metadata
3136 // and send valid metadata with already filled buffers for current metadata
3137 /* we could hit this case when we either
3138 * 1. have a pending reprocess request or
3139 * 2. miss a metadata buffer callback */
3140 if (i->frame_number < frame_number) {
3141 if (i->input_buffer) {
3142 /* this will be handled in handleInputBufferWithLock */
3143 i++;
3144 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003145 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003146
3147 mPendingLiveRequest--;
3148
3149 CameraMetadata dummyMetadata;
3150 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3151 result.result = dummyMetadata.release();
3152
3153 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003154 }
3155 } else {
3156 mPendingLiveRequest--;
3157 /* Clear notify_msg structure */
3158 camera3_notify_msg_t notify_msg;
3159 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3160
3161 // Send shutter notify to frameworks
3162 notify_msg.type = CAMERA3_MSG_SHUTTER;
3163 notify_msg.message.shutter.frame_number = i->frame_number;
3164 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003165 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003166
3167 i->timestamp = capture_time;
3168
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003169 /* Set the timestamp in display metadata so that clients aware of
3170 private_handle such as VT can use this un-modified timestamps.
3171 Camera framework is unaware of this timestamp and cannot change this */
3172 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3173
Thierry Strudel3d639192016-09-09 11:52:26 -07003174 // Find channel requiring metadata, meaning internal offline postprocess
3175 // is needed.
3176 //TODO: for now, we don't support two streams requiring metadata at the same time.
3177 // (because we are not making copies, and metadata buffer is not reference counted.
3178 bool internalPproc = false;
3179 for (pendingBufferIterator iter = i->buffers.begin();
3180 iter != i->buffers.end(); iter++) {
3181 if (iter->need_metadata) {
3182 internalPproc = true;
3183 QCamera3ProcessingChannel *channel =
3184 (QCamera3ProcessingChannel *)iter->stream->priv;
3185 channel->queueReprocMetadata(metadata_buf);
3186 break;
3187 }
3188 }
3189
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003190 for (auto itr = i->internalRequestList.begin();
3191 itr != i->internalRequestList.end(); itr++) {
3192 if (itr->need_metadata) {
3193 internalPproc = true;
3194 QCamera3ProcessingChannel *channel =
3195 (QCamera3ProcessingChannel *)itr->stream->priv;
3196 channel->queueReprocMetadata(metadata_buf);
3197 break;
3198 }
3199 }
3200
3201
Thierry Strudel3d639192016-09-09 11:52:26 -07003202 result.result = translateFromHalMetadata(metadata,
3203 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003204 i->capture_intent, internalPproc, i->fwkCacMode,
3205 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003206
3207 saveExifParams(metadata);
3208
3209 if (i->blob_request) {
3210 {
3211 //Dump tuning metadata if enabled and available
3212 char prop[PROPERTY_VALUE_MAX];
3213 memset(prop, 0, sizeof(prop));
3214 property_get("persist.camera.dumpmetadata", prop, "0");
3215 int32_t enabled = atoi(prop);
3216 if (enabled && metadata->is_tuning_params_valid) {
3217 dumpMetadataToFile(metadata->tuning_params,
3218 mMetaFrameCount,
3219 enabled,
3220 "Snapshot",
3221 frame_number);
3222 }
3223 }
3224 }
3225
3226 if (!internalPproc) {
3227 LOGD("couldn't find need_metadata for this metadata");
3228 // Return metadata buffer
3229 if (free_and_bufdone_meta_buf) {
3230 mMetadataChannel->bufDone(metadata_buf);
3231 free(metadata_buf);
3232 }
3233 }
3234 }
3235 if (!result.result) {
3236 LOGE("metadata is NULL");
3237 }
3238 result.frame_number = i->frame_number;
3239 result.input_buffer = i->input_buffer;
3240 result.num_output_buffers = 0;
3241 result.output_buffers = NULL;
3242 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3243 j != i->buffers.end(); j++) {
3244 if (j->buffer) {
3245 result.num_output_buffers++;
3246 }
3247 }
3248
3249 updateFpsInPreviewBuffer(metadata, i->frame_number);
3250
3251 if (result.num_output_buffers > 0) {
3252 camera3_stream_buffer_t *result_buffers =
3253 new camera3_stream_buffer_t[result.num_output_buffers];
3254 if (result_buffers != NULL) {
3255 size_t result_buffers_idx = 0;
3256 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3257 j != i->buffers.end(); j++) {
3258 if (j->buffer) {
3259 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3260 m != mPendingFrameDropList.end(); m++) {
3261 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3262 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3263 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3264 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3265 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3266 frame_number, streamID);
3267 m = mPendingFrameDropList.erase(m);
3268 break;
3269 }
3270 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003271 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003272 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3273 result_buffers[result_buffers_idx++] = *(j->buffer);
3274 free(j->buffer);
3275 j->buffer = NULL;
3276 }
3277 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003278
Thierry Strudel3d639192016-09-09 11:52:26 -07003279 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003280 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003281 LOGD("meta frame_number = %u, capture_time = %lld",
3282 result.frame_number, i->timestamp);
3283 free_camera_metadata((camera_metadata_t *)result.result);
3284 delete[] result_buffers;
3285 }else {
3286 LOGE("Fatal error: out of memory");
3287 }
3288 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003289 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003290 LOGD("meta frame_number = %u, capture_time = %lld",
3291 result.frame_number, i->timestamp);
3292 free_camera_metadata((camera_metadata_t *)result.result);
3293 }
3294
3295 i = erasePendingRequest(i);
3296
3297 if (!mPendingReprocessResultList.empty()) {
3298 handlePendingReprocResults(frame_number + 1);
3299 }
3300 }
3301
3302done_metadata:
3303 for (pendingRequestIterator i = mPendingRequestsList.begin();
3304 i != mPendingRequestsList.end() ;i++) {
3305 i->pipeline_depth++;
3306 }
3307 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3308 unblockRequestIfNecessary();
3309}
3310
3311/*===========================================================================
3312 * FUNCTION : hdrPlusPerfLock
3313 *
3314 * DESCRIPTION: perf lock for HDR+ using custom intent
3315 *
3316 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3317 *
3318 * RETURN : None
3319 *
3320 *==========================================================================*/
3321void QCamera3HardwareInterface::hdrPlusPerfLock(
3322 mm_camera_super_buf_t *metadata_buf)
3323{
3324 if (NULL == metadata_buf) {
3325 LOGE("metadata_buf is NULL");
3326 return;
3327 }
3328 metadata_buffer_t *metadata =
3329 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3330 int32_t *p_frame_number_valid =
3331 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3332 uint32_t *p_frame_number =
3333 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3334
3335 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3336 LOGE("%s: Invalid metadata", __func__);
3337 return;
3338 }
3339
3340 //acquire perf lock for 5 sec after the last HDR frame is captured
3341 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3342 if ((p_frame_number != NULL) &&
3343 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003344 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003345 }
3346 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003347}
3348
3349/*===========================================================================
3350 * FUNCTION : handleInputBufferWithLock
3351 *
3352 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3353 *
3354 * PARAMETERS : @frame_number: frame number of the input buffer
3355 *
3356 * RETURN :
3357 *
3358 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Locate the pending (reprocess) request matching this frame number.
    // Caller holds mMutex (see function header), so list traversal is safe.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Step 1: send the shutter notify exactly once per request. The
        // shutter timestamp is taken from ANDROID_SENSOR_TIMESTAMP in the
        // input settings when present; otherwise the current monotonic time
        // is used as a fallback (and the discrepancy is logged).
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                    i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Step 2: wait on (and close) the input buffer's release fence
        // before handing the buffer back in the capture result.
        if (i->input_buffer->release_fence != -1) {
            int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
            close(i->input_buffer->release_fence);
            if (rc != OK) {
                LOGE("input buffer sync wait failed %d", rc);
            }
        }

        // Step 3: return the request's settings as the (final) result along
        // with the consumed input buffer. No output buffers are attached
        // here (num_output_buffers stays 0 from the memset).
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                i->frame_number);
        // Step 4: the request is fully serviced; drop it from the list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3416
3417/*===========================================================================
3418 * FUNCTION : handleBufferWithLock
3419 *
3420 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3421 *
3422 * PARAMETERS : @buffer: image buffer for the callback
3423 * @frame_number: frame number of the image buffer
3424 *
3425 * RETURN :
3426 *
3427 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A returned BLOB (JPEG) buffer marks snapshot completion: release the
    // take-snapshot perf lock acquired earlier.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Case 1: metadata for this frame was already delivered (request
        // removed from the list) -- send a buffer-only result immediately.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                        j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // partial_result = 0: this result carries only a buffer, no metadata.
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped by the ISP,
        // mark the buffer as errored and consume the drop record.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded against this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Case 2: reprocess request -- deliver shutter + full result
            // (settings, input buffer, and this output buffer) in one shot.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // Prefer the sensor timestamp carried in the input settings;
            // fall back to the current monotonic time if absent.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait for (and close) the input buffer's release fence before
            // returning it to the framework.
            if (i->input_buffer->release_fence != -1) {
                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
                close(i->input_buffer->release_fence);
                if (rc != OK) {
                    LOGE("input buffer sync wait failed %d", rc);
                }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = PARTIAL_RESULT_COUNT;

            // Notify must precede the result per the camera3 callback contract.
            orchestrateNotify(&notify_msg);
            orchestrateResult(&result);
            LOGD("Notify reprocess now %d!", frame_number);
            i = erasePendingRequest(i);
        } else {
            // Case 3: metadata not yet received -- cache a copy of the
            // buffer on the pending request; it is sent together with the
            // metadata later. The malloc'd copy is freed on that path.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }

    // First preview buffer out: drop the start-preview / open-camera perf
    // locks and switch to the steady-state encode power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3562
3563/*===========================================================================
3564 * FUNCTION : unblockRequestIfNecessary
3565 *
3566 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3567 * that mMutex is held when this function is called.
3568 *
3569 * PARAMETERS :
3570 *
3571 * RETURN :
3572 *
3573 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Per the function header, mMutex is already held by the caller; a bare
    // signal is all that is needed to wake the blocked request thread.
    // Unblock process_capture_request
    pthread_cond_signal(&mRequestCond);
}
3579
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003580/*===========================================================================
3581 * FUNCTION : isHdrSnapshotRequest
3582 *
3583 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3584 *
3585 * PARAMETERS : camera3 request structure
3586 *
3587 * RETURN : boolean decision variable
3588 *
3589 *==========================================================================*/
3590bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3591{
3592 if (request == NULL) {
3593 LOGE("Invalid request handle");
3594 assert(0);
3595 return false;
3596 }
3597
3598 if (!mForceHdrSnapshot) {
3599 CameraMetadata frame_settings;
3600 frame_settings = request->settings;
3601
3602 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3603 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3604 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3605 return false;
3606 }
3607 } else {
3608 return false;
3609 }
3610
3611 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3612 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3613 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3614 return false;
3615 }
3616 } else {
3617 return false;
3618 }
3619 }
3620
3621 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3622 if (request->output_buffers[i].stream->format
3623 == HAL_PIXEL_FORMAT_BLOB) {
3624 return true;
3625 }
3626 }
3627
3628 return false;
3629}
3630/*===========================================================================
3631 * FUNCTION : orchestrateRequest
3632 *
3633 * DESCRIPTION: Orchestrates a capture request from camera service
3634 *
3635 * PARAMETERS :
3636 * @request : request from framework to process
3637 *
3638 * RETURN : Error status codes
3639 *
3640 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Save the framework's view of the request so it can be restored after
    // the HDR expansion below mutates frame_number/settings/output count.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the single framework request into a bracketed
        // sequence of internal requests (-2x, 0x, +2x EV), each pair being a
        // metering-only settle frame followed by a real capture. Only the 0x
        // capture is mapped back to the framework frame number; all others
        // use internally generated frame numbers and their results/notifies
        // are dropped by orchestrateResult()/orchestrateNotify().
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // NOTE(review): modified_meta.release() hands out an owning
        // camera_metadata_t*; CameraMetadata::operator=(const
        // camera_metadata_t*) clones rather than adopts, so each released
        // buffer below appears to be leaked when modified_settings is
        // overwritten. Freeing here may be unsafe if the pending-request
        // path retains the raw settings pointer -- TODO confirm ownership.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // The 0x (normal exposure) capture is the one the framework sees:
        // restore its output buffers and map its internal frame number to
        // the original framework frame number.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Reset exposure compensation to 0 (AE still locked) for the 0x pair.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // Settle frame: metering only, no reprocess metadata needed.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Actual 0x internal capture: full frame with metadata for offline
        // post-processing.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // +2x settle frame (metering only)...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...followed by the +2x capture with metadata.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal request: just translate the framework frame number to a
        // fresh internal one and forward as-is.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3776
3777/*===========================================================================
3778 * FUNCTION : orchestrateResult
3779 *
3780 * DESCRIPTION: Orchestrates a capture result to camera service
3781 *
3782 * PARAMETERS :
3783 * @request : request from framework to process
3784 *
3785 * RETURN :
3786 *
3787 *==========================================================================*/
3788void QCamera3HardwareInterface::orchestrateResult(
3789 camera3_capture_result_t *result)
3790{
3791 uint32_t frameworkFrameNumber;
3792 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3793 frameworkFrameNumber);
3794 if (rc != NO_ERROR) {
3795 LOGE("Cannot find translated frameworkFrameNumber");
3796 assert(0);
3797 } else {
3798 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3799 LOGD("CAM_DEBUG Internal Request drop the result");
3800 } else {
3801 result->frame_number = frameworkFrameNumber;
3802 mCallbackOps->process_capture_result(mCallbackOps, result);
3803 }
3804 }
3805}
3806
3807/*===========================================================================
3808 * FUNCTION : orchestrateNotify
3809 *
3810 * DESCRIPTION: Orchestrates a notify to camera service
3811 *
3812 * PARAMETERS :
3813 * @request : request from framework to process
3814 *
3815 * RETURN :
3816 *
3817 *==========================================================================*/
3818void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3819{
3820 uint32_t frameworkFrameNumber;
3821 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3822 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3823 frameworkFrameNumber);
3824 if (rc != NO_ERROR) {
3825 LOGE("Cannot find translated frameworkFrameNumber");
3826 assert(0);
3827 } else {
3828 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3829 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3830 } else {
3831 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3832 mCallbackOps->notify(mCallbackOps, notify_msg);
3833 }
3834 }
3835}
3836
3837/*===========================================================================
3838 * FUNCTION : FrameNumberRegistry
3839 *
3840 * DESCRIPTION: Constructor
3841 *
3842 * PARAMETERS :
3843 *
3844 * RETURN :
3845 *
3846 *==========================================================================*/
3847FrameNumberRegistry::FrameNumberRegistry()
3848{
3849 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3850}
3851
3852/*===========================================================================
3853 * FUNCTION : ~FrameNumberRegistry
3854 *
3855 * DESCRIPTION: Destructor
3856 *
3857 * PARAMETERS :
3858 *
3859 * RETURN :
3860 *
3861 *==========================================================================*/
3862FrameNumberRegistry::~FrameNumberRegistry()
3863{
3864}
3865
3866/*===========================================================================
3867 * FUNCTION : PurgeOldEntriesLocked
3868 *
3869 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
3870 *
3871 * PARAMETERS :
3872 *
3873 * RETURN : NONE
3874 *
3875 *==========================================================================*/
3876void FrameNumberRegistry::purgeOldEntriesLocked()
3877{
3878 while (_register.begin() != _register.end()) {
3879 auto itr = _register.begin();
3880 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3881 _register.erase(itr);
3882 } else {
3883 return;
3884 }
3885 }
3886}
3887
3888/*===========================================================================
3889 * FUNCTION : allocStoreInternalFrameNumber
3890 *
3891 * DESCRIPTION: Method to note down a framework request and associate a new
3892 * internal request number against it
3893 *
3894 * PARAMETERS :
3895 * @fFrameNumber: Identifier given by framework
3896 * @internalFN : Output parameter which will have the newly generated internal
3897 * entry
3898 *
3899 * RETURN : Error code
3900 *
3901 *==========================================================================*/
3902int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
3903 uint32_t &internalFrameNumber)
3904{
3905 Mutex::Autolock lock(mRegistryLock);
3906 internalFrameNumber = _nextFreeInternalNumber++;
3907 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
3908 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
3909 purgeOldEntriesLocked();
3910 return NO_ERROR;
3911}
3912
3913/*===========================================================================
3914 * FUNCTION : generateStoreInternalFrameNumber
3915 *
3916 * DESCRIPTION: Method to associate a new internal request number independent
3917 * of any associate with framework requests
3918 *
3919 * PARAMETERS :
3920 * @internalFrame#: Output parameter which will have the newly generated internal
3921 *
3922 *
3923 * RETURN : Error code
3924 *
3925 *==========================================================================*/
3926int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
3927{
3928 Mutex::Autolock lock(mRegistryLock);
3929 internalFrameNumber = _nextFreeInternalNumber++;
3930 LOGD("Generated internal framenumber:%d", internalFrameNumber);
3931 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
3932 purgeOldEntriesLocked();
3933 return NO_ERROR;
3934}
3935
3936/*===========================================================================
3937 * FUNCTION : getFrameworkFrameNumber
3938 *
3939 * DESCRIPTION: Method to query the framework framenumber given an internal #
3940 *
3941 * PARAMETERS :
3942 * @internalFrame#: Internal reference
3943 * @frameworkframenumber: Output parameter holding framework frame entry
3944 *
3945 * RETURN : Error code
3946 *
3947 *==========================================================================*/
3948int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
3949 uint32_t &frameworkFrameNumber)
3950{
3951 Mutex::Autolock lock(mRegistryLock);
3952 auto itr = _register.find(internalFrameNumber);
3953 if (itr == _register.end()) {
3954 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
3955 return -ENOENT;
3956 }
3957
3958 frameworkFrameNumber = itr->second;
3959 purgeOldEntriesLocked();
3960 return NO_ERROR;
3961}
Thierry Strudel3d639192016-09-09 11:52:26 -07003962
3963/*===========================================================================
3964 * FUNCTION : processCaptureRequest
3965 *
3966 * DESCRIPTION: process a capture request from camera service
3967 *
3968 * PARAMETERS :
3969 * @request : request from framework to process
3970 *
3971 * RETURN :
3972 *
3973 *==========================================================================*/
3974int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003975 camera3_capture_request_t *request,
3976 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003977{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003978 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07003979 int rc = NO_ERROR;
3980 int32_t request_id;
3981 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07003982 bool isVidBufRequested = false;
3983 camera3_stream_buffer_t *pInputBuffer = NULL;
3984
3985 pthread_mutex_lock(&mMutex);
3986
3987 // Validate current state
3988 switch (mState) {
3989 case CONFIGURED:
3990 case STARTED:
3991 /* valid state */
3992 break;
3993
3994 case ERROR:
3995 pthread_mutex_unlock(&mMutex);
3996 handleCameraDeviceError();
3997 return -ENODEV;
3998
3999 default:
4000 LOGE("Invalid state %d", mState);
4001 pthread_mutex_unlock(&mMutex);
4002 return -ENODEV;
4003 }
4004
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004005 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004006 if (rc != NO_ERROR) {
4007 LOGE("incoming request is not valid");
4008 pthread_mutex_unlock(&mMutex);
4009 return rc;
4010 }
4011
4012 meta = request->settings;
4013
4014 // For first capture request, send capture intent, and
4015 // stream on all streams
4016 if (mState == CONFIGURED) {
4017 // send an unconfigure to the backend so that the isp
4018 // resources are deallocated
4019 if (!mFirstConfiguration) {
4020 cam_stream_size_info_t stream_config_info;
4021 int32_t hal_version = CAM_HAL_V3;
4022 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4023 stream_config_info.buffer_info.min_buffers =
4024 MIN_INFLIGHT_REQUESTS;
4025 stream_config_info.buffer_info.max_buffers =
4026 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4027 clear_metadata_buffer(mParameters);
4028 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4029 CAM_INTF_PARM_HAL_VERSION, hal_version);
4030 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4031 CAM_INTF_META_STREAM_INFO, stream_config_info);
4032 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4033 mParameters);
4034 if (rc < 0) {
4035 LOGE("set_parms for unconfigure failed");
4036 pthread_mutex_unlock(&mMutex);
4037 return rc;
4038 }
4039 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004040 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004041 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004042 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004043 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004044 property_get("persist.camera.is_type", is_type_value, "4");
4045 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4046 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4047 property_get("persist.camera.is_type_preview", is_type_value, "4");
4048 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4049 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004050
4051 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4052 int32_t hal_version = CAM_HAL_V3;
4053 uint8_t captureIntent =
4054 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4055 mCaptureIntent = captureIntent;
4056 clear_metadata_buffer(mParameters);
4057 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4058 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4059 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004060 if (mFirstConfiguration) {
4061 // configure instant AEC
4062 // Instant AEC is a session based parameter and it is needed only
4063 // once per complete session after open camera.
4064 // i.e. This is set only once for the first capture request, after open camera.
4065 setInstantAEC(meta);
4066 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004067 uint8_t fwkVideoStabMode=0;
4068 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4069 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4070 }
4071
4072 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4073 // turn it on for video/preview
4074 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4075 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004076 int32_t vsMode;
4077 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4079 rc = BAD_VALUE;
4080 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004081 LOGD("setEis %d", setEis);
4082 bool eis3Supported = false;
4083 size_t count = IS_TYPE_MAX;
4084 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4085 for (size_t i = 0; i < count; i++) {
4086 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4087 eis3Supported = true;
4088 break;
4089 }
4090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004091
4092 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004093 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004094 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4095 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004096 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4097 is_type = isTypePreview;
4098 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4099 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4100 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004101 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004102 } else {
4103 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004104 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004105 } else {
4106 is_type = IS_TYPE_NONE;
4107 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004108 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004109 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004110 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4111 }
4112 }
4113
4114 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4115 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4116
4117 int32_t tintless_value = 1;
4118 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4119 CAM_INTF_PARM_TINTLESS, tintless_value);
4120 //Disable CDS for HFR mode or if DIS/EIS is on.
4121 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4122 //after every configure_stream
4123 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4124 (m_bIsVideo)) {
4125 int32_t cds = CAM_CDS_MODE_OFF;
4126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4127 CAM_INTF_PARM_CDS_MODE, cds))
4128 LOGE("Failed to disable CDS for HFR mode");
4129
4130 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004131
4132 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4133 uint8_t* use_av_timer = NULL;
4134
4135 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004136 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004137 use_av_timer = &m_debug_avtimer;
4138 }
4139 else{
4140 use_av_timer =
4141 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004142 if (use_av_timer) {
4143 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4144 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004145 }
4146
4147 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4148 rc = BAD_VALUE;
4149 }
4150 }
4151
Thierry Strudel3d639192016-09-09 11:52:26 -07004152 setMobicat();
4153
4154 /* Set fps and hfr mode while sending meta stream info so that sensor
4155 * can configure appropriate streaming mode */
4156 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004157 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4158 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004159 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4160 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004161 if (rc == NO_ERROR) {
4162 int32_t max_fps =
4163 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4164 if (max_fps == 60) {
4165 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4166 }
4167 /* For HFR, more buffers are dequeued upfront to improve the performance */
4168 if (mBatchSize) {
4169 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4170 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4171 }
4172 }
4173 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004174 LOGE("setHalFpsRange failed");
4175 }
4176 }
4177 if (meta.exists(ANDROID_CONTROL_MODE)) {
4178 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4179 rc = extractSceneMode(meta, metaMode, mParameters);
4180 if (rc != NO_ERROR) {
4181 LOGE("extractSceneMode failed");
4182 }
4183 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004184 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004185
Thierry Strudel04e026f2016-10-10 11:27:36 -07004186 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4187 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4188 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4189 rc = setVideoHdrMode(mParameters, vhdr);
4190 if (rc != NO_ERROR) {
4191 LOGE("setVideoHDR is failed");
4192 }
4193 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004194
Thierry Strudel3d639192016-09-09 11:52:26 -07004195 //TODO: validate the arguments, HSV scenemode should have only the
4196 //advertised fps ranges
4197
4198 /*set the capture intent, hal version, tintless, stream info,
4199 *and disenable parameters to the backend*/
4200 LOGD("set_parms META_STREAM_INFO " );
4201 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4202 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004203 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004204 mStreamConfigInfo.type[i],
4205 mStreamConfigInfo.stream_sizes[i].width,
4206 mStreamConfigInfo.stream_sizes[i].height,
4207 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004208 mStreamConfigInfo.format[i],
4209 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004210 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004211
Thierry Strudel3d639192016-09-09 11:52:26 -07004212 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4213 mParameters);
4214 if (rc < 0) {
4215 LOGE("set_parms failed for hal version, stream info");
4216 }
4217
4218 cam_dimension_t sensor_dim;
4219 memset(&sensor_dim, 0, sizeof(sensor_dim));
4220 rc = getSensorOutputSize(sensor_dim);
4221 if (rc != NO_ERROR) {
4222 LOGE("Failed to get sensor output size");
4223 pthread_mutex_unlock(&mMutex);
4224 goto error_exit;
4225 }
4226
4227 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4228 gCamCapability[mCameraId]->active_array_size.height,
4229 sensor_dim.width, sensor_dim.height);
4230
4231 /* Set batchmode before initializing channel. Since registerBuffer
4232 * internally initializes some of the channels, better set batchmode
4233 * even before first register buffer */
4234 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4235 it != mStreamInfo.end(); it++) {
4236 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4237 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4238 && mBatchSize) {
4239 rc = channel->setBatchSize(mBatchSize);
4240 //Disable per frame map unmap for HFR/batchmode case
4241 rc |= channel->setPerFrameMapUnmap(false);
4242 if (NO_ERROR != rc) {
4243 LOGE("Channel init failed %d", rc);
4244 pthread_mutex_unlock(&mMutex);
4245 goto error_exit;
4246 }
4247 }
4248 }
4249
4250 //First initialize all streams
4251 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4252 it != mStreamInfo.end(); it++) {
4253 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4254 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4255 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004256 setEis) {
4257 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4258 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4259 is_type = mStreamConfigInfo.is_type[i];
4260 break;
4261 }
4262 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004263 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004264 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004265 rc = channel->initialize(IS_TYPE_NONE);
4266 }
4267 if (NO_ERROR != rc) {
4268 LOGE("Channel initialization failed %d", rc);
4269 pthread_mutex_unlock(&mMutex);
4270 goto error_exit;
4271 }
4272 }
4273
4274 if (mRawDumpChannel) {
4275 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4276 if (rc != NO_ERROR) {
4277 LOGE("Error: Raw Dump Channel init failed");
4278 pthread_mutex_unlock(&mMutex);
4279 goto error_exit;
4280 }
4281 }
4282 if (mSupportChannel) {
4283 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4284 if (rc < 0) {
4285 LOGE("Support channel initialization failed");
4286 pthread_mutex_unlock(&mMutex);
4287 goto error_exit;
4288 }
4289 }
4290 if (mAnalysisChannel) {
4291 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4292 if (rc < 0) {
4293 LOGE("Analysis channel initialization failed");
4294 pthread_mutex_unlock(&mMutex);
4295 goto error_exit;
4296 }
4297 }
4298 if (mDummyBatchChannel) {
4299 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4300 if (rc < 0) {
4301 LOGE("mDummyBatchChannel setBatchSize failed");
4302 pthread_mutex_unlock(&mMutex);
4303 goto error_exit;
4304 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004305 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004306 if (rc < 0) {
4307 LOGE("mDummyBatchChannel initialization failed");
4308 pthread_mutex_unlock(&mMutex);
4309 goto error_exit;
4310 }
4311 }
4312
4313 // Set bundle info
4314 rc = setBundleInfo();
4315 if (rc < 0) {
4316 LOGE("setBundleInfo failed %d", rc);
4317 pthread_mutex_unlock(&mMutex);
4318 goto error_exit;
4319 }
4320
4321 //update settings from app here
4322 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4323 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4324 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4325 }
4326 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4327 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4328 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4329 }
4330 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4331 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4332 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4333
4334 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4335 (mLinkedCameraId != mCameraId) ) {
4336 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4337 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004338 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004339 goto error_exit;
4340 }
4341 }
4342
4343 // add bundle related cameras
4344 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4345 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004346 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4347 &m_pDualCamCmdPtr->bundle_info;
4348 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004349 if (mIsDeviceLinked)
4350 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4351 else
4352 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4353
4354 pthread_mutex_lock(&gCamLock);
4355
4356 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4357 LOGE("Dualcam: Invalid Session Id ");
4358 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004359 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004360 goto error_exit;
4361 }
4362
4363 if (mIsMainCamera == 1) {
4364 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4365 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004366 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004367 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004368 // related session id should be session id of linked session
4369 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4370 } else {
4371 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4372 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004373 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004374 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004375 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4376 }
4377 pthread_mutex_unlock(&gCamLock);
4378
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004379 rc = mCameraHandle->ops->set_dual_cam_cmd(
4380 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004381 if (rc < 0) {
4382 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004383 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004384 goto error_exit;
4385 }
4386 }
4387
4388 //Then start them.
4389 LOGH("Start META Channel");
4390 rc = mMetadataChannel->start();
4391 if (rc < 0) {
4392 LOGE("META channel start failed");
4393 pthread_mutex_unlock(&mMutex);
4394 goto error_exit;
4395 }
4396
4397 if (mAnalysisChannel) {
4398 rc = mAnalysisChannel->start();
4399 if (rc < 0) {
4400 LOGE("Analysis channel start failed");
4401 mMetadataChannel->stop();
4402 pthread_mutex_unlock(&mMutex);
4403 goto error_exit;
4404 }
4405 }
4406
4407 if (mSupportChannel) {
4408 rc = mSupportChannel->start();
4409 if (rc < 0) {
4410 LOGE("Support channel start failed");
4411 mMetadataChannel->stop();
4412 /* Although support and analysis are mutually exclusive today
4413 adding it in anycase for future proofing */
4414 if (mAnalysisChannel) {
4415 mAnalysisChannel->stop();
4416 }
4417 pthread_mutex_unlock(&mMutex);
4418 goto error_exit;
4419 }
4420 }
4421 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4422 it != mStreamInfo.end(); it++) {
4423 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4424 LOGH("Start Processing Channel mask=%d",
4425 channel->getStreamTypeMask());
4426 rc = channel->start();
4427 if (rc < 0) {
4428 LOGE("channel start failed");
4429 pthread_mutex_unlock(&mMutex);
4430 goto error_exit;
4431 }
4432 }
4433
4434 if (mRawDumpChannel) {
4435 LOGD("Starting raw dump stream");
4436 rc = mRawDumpChannel->start();
4437 if (rc != NO_ERROR) {
4438 LOGE("Error Starting Raw Dump Channel");
4439 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4440 it != mStreamInfo.end(); it++) {
4441 QCamera3Channel *channel =
4442 (QCamera3Channel *)(*it)->stream->priv;
4443 LOGH("Stopping Processing Channel mask=%d",
4444 channel->getStreamTypeMask());
4445 channel->stop();
4446 }
4447 if (mSupportChannel)
4448 mSupportChannel->stop();
4449 if (mAnalysisChannel) {
4450 mAnalysisChannel->stop();
4451 }
4452 mMetadataChannel->stop();
4453 pthread_mutex_unlock(&mMutex);
4454 goto error_exit;
4455 }
4456 }
4457
4458 if (mChannelHandle) {
4459
4460 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4461 mChannelHandle);
4462 if (rc != NO_ERROR) {
4463 LOGE("start_channel failed %d", rc);
4464 pthread_mutex_unlock(&mMutex);
4465 goto error_exit;
4466 }
4467 }
4468
4469 goto no_error;
4470error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004471 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004472 return rc;
4473no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004474 mWokenUpByDaemon = false;
4475 mPendingLiveRequest = 0;
4476 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004477 }
4478
4479 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004480 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004481
4482 if (mFlushPerf) {
4483 //we cannot accept any requests during flush
4484 LOGE("process_capture_request cannot proceed during flush");
4485 pthread_mutex_unlock(&mMutex);
4486 return NO_ERROR; //should return an error
4487 }
4488
4489 if (meta.exists(ANDROID_REQUEST_ID)) {
4490 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4491 mCurrentRequestId = request_id;
4492 LOGD("Received request with id: %d", request_id);
4493 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4494 LOGE("Unable to find request id field, \
4495 & no previous id available");
4496 pthread_mutex_unlock(&mMutex);
4497 return NAME_NOT_FOUND;
4498 } else {
4499 LOGD("Re-using old request id");
4500 request_id = mCurrentRequestId;
4501 }
4502
4503 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4504 request->num_output_buffers,
4505 request->input_buffer,
4506 frameNumber);
4507 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004508 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004509 int blob_request = 0;
4510 uint32_t snapshotStreamId = 0;
4511 for (size_t i = 0; i < request->num_output_buffers; i++) {
4512 const camera3_stream_buffer_t& output = request->output_buffers[i];
4513 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4514
4515 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004516 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004517 blob_request = 1;
4518 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4519 }
4520
4521 if (output.acquire_fence != -1) {
4522 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4523 close(output.acquire_fence);
4524 if (rc != OK) {
4525 LOGE("sync wait failed %d", rc);
4526 pthread_mutex_unlock(&mMutex);
4527 return rc;
4528 }
4529 }
4530
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004531 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004532 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004533
4534 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4535 isVidBufRequested = true;
4536 }
4537 }
4538
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004539 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4540 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4541 itr++) {
4542 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4543 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4544 channel->getStreamID(channel->getStreamTypeMask());
4545
4546 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4547 isVidBufRequested = true;
4548 }
4549 }
4550
Thierry Strudel3d639192016-09-09 11:52:26 -07004551 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004552 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004553 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004554 }
4555 if (blob_request && mRawDumpChannel) {
4556 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004557 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004558 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004559 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004560 }
4561
4562 if(request->input_buffer == NULL) {
4563 /* Parse the settings:
4564 * - For every request in NORMAL MODE
4565 * - For every request in HFR mode during preview only case
4566 * - For first request of every batch in HFR mode during video
4567 * recording. In batchmode the same settings except frame number is
4568 * repeated in each request of the batch.
4569 */
4570 if (!mBatchSize ||
4571 (mBatchSize && !isVidBufRequested) ||
4572 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004573 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004574 if (rc < 0) {
4575 LOGE("fail to set frame parameters");
4576 pthread_mutex_unlock(&mMutex);
4577 return rc;
4578 }
4579 }
4580 /* For batchMode HFR, setFrameParameters is not called for every
4581 * request. But only frame number of the latest request is parsed.
4582 * Keep track of first and last frame numbers in a batch so that
4583 * metadata for the frame numbers of batch can be duplicated in
4584 * handleBatchMetadta */
4585 if (mBatchSize) {
4586 if (!mToBeQueuedVidBufs) {
4587 //start of the batch
4588 mFirstFrameNumberInBatch = request->frame_number;
4589 }
4590 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4591 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4592 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004593 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004594 return BAD_VALUE;
4595 }
4596 }
4597 if (mNeedSensorRestart) {
4598 /* Unlock the mutex as restartSensor waits on the channels to be
4599 * stopped, which in turn calls stream callback functions -
4600 * handleBufferWithLock and handleMetadataWithLock */
4601 pthread_mutex_unlock(&mMutex);
4602 rc = dynamicUpdateMetaStreamInfo();
4603 if (rc != NO_ERROR) {
4604 LOGE("Restarting the sensor failed");
4605 return BAD_VALUE;
4606 }
4607 mNeedSensorRestart = false;
4608 pthread_mutex_lock(&mMutex);
4609 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004610 if(mResetInstantAEC) {
4611 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4612 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4613 mResetInstantAEC = false;
4614 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004615 } else {
4616
4617 if (request->input_buffer->acquire_fence != -1) {
4618 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4619 close(request->input_buffer->acquire_fence);
4620 if (rc != OK) {
4621 LOGE("input buffer sync wait failed %d", rc);
4622 pthread_mutex_unlock(&mMutex);
4623 return rc;
4624 }
4625 }
4626 }
4627
4628 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4629 mLastCustIntentFrmNum = frameNumber;
4630 }
4631 /* Update pending request list and pending buffers map */
4632 PendingRequestInfo pendingRequest;
4633 pendingRequestIterator latestRequest;
4634 pendingRequest.frame_number = frameNumber;
4635 pendingRequest.num_buffers = request->num_output_buffers;
4636 pendingRequest.request_id = request_id;
4637 pendingRequest.blob_request = blob_request;
4638 pendingRequest.timestamp = 0;
4639 pendingRequest.bUrgentReceived = 0;
4640 if (request->input_buffer) {
4641 pendingRequest.input_buffer =
4642 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4643 *(pendingRequest.input_buffer) = *(request->input_buffer);
4644 pInputBuffer = pendingRequest.input_buffer;
4645 } else {
4646 pendingRequest.input_buffer = NULL;
4647 pInputBuffer = NULL;
4648 }
4649
4650 pendingRequest.pipeline_depth = 0;
4651 pendingRequest.partial_result_cnt = 0;
4652 extractJpegMetadata(mCurJpegMeta, request);
4653 pendingRequest.jpegMetadata = mCurJpegMeta;
4654 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4655 pendingRequest.shutter_notified = false;
4656
4657 //extract capture intent
4658 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4659 mCaptureIntent =
4660 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4661 }
4662 pendingRequest.capture_intent = mCaptureIntent;
4663
4664 //extract CAC info
4665 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4666 mCacMode =
4667 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4668 }
4669 pendingRequest.fwkCacMode = mCacMode;
4670
4671 PendingBuffersInRequest bufsForCurRequest;
4672 bufsForCurRequest.frame_number = frameNumber;
4673 // Mark current timestamp for the new request
4674 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4675
4676 for (size_t i = 0; i < request->num_output_buffers; i++) {
4677 RequestedBufferInfo requestedBuf;
4678 memset(&requestedBuf, 0, sizeof(requestedBuf));
4679 requestedBuf.stream = request->output_buffers[i].stream;
4680 requestedBuf.buffer = NULL;
4681 pendingRequest.buffers.push_back(requestedBuf);
4682
4683 // Add to buffer handle the pending buffers list
4684 PendingBufferInfo bufferInfo;
4685 bufferInfo.buffer = request->output_buffers[i].buffer;
4686 bufferInfo.stream = request->output_buffers[i].stream;
4687 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4688 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4689 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4690 frameNumber, bufferInfo.buffer,
4691 channel->getStreamTypeMask(), bufferInfo.stream->format);
4692 }
4693 // Add this request packet into mPendingBuffersMap
4694 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4695 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4696 mPendingBuffersMap.get_num_overall_buffers());
4697
4698 latestRequest = mPendingRequestsList.insert(
4699 mPendingRequestsList.end(), pendingRequest);
4700 if(mFlush) {
4701 LOGI("mFlush is true");
4702 pthread_mutex_unlock(&mMutex);
4703 return NO_ERROR;
4704 }
4705
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004706 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004707 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004708 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004709
4710 if(request->input_buffer != NULL){
4711 LOGD("Input request, frame_number %d", frameNumber);
4712 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4713 if (NO_ERROR != rc) {
4714 LOGE("fail to set reproc parameters");
4715 pthread_mutex_unlock(&mMutex);
4716 return rc;
4717 }
4718 }
4719
4720 // Call request on other streams
4721 uint32_t streams_need_metadata = 0;
4722 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4723 for (size_t i = 0; i < request->num_output_buffers; i++) {
4724 const camera3_stream_buffer_t& output = request->output_buffers[i];
4725 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4726
4727 if (channel == NULL) {
4728 LOGW("invalid channel pointer for stream");
4729 continue;
4730 }
4731
4732 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4733 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4734 output.buffer, request->input_buffer, frameNumber);
4735 if(request->input_buffer != NULL){
4736 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004737 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004738 if (rc < 0) {
4739 LOGE("Fail to request on picture channel");
4740 pthread_mutex_unlock(&mMutex);
4741 return rc;
4742 }
4743 } else {
4744 LOGD("snapshot request with buffer %p, frame_number %d",
4745 output.buffer, frameNumber);
4746 if (!request->settings) {
4747 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004748 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004749 } else {
4750 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004751 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004752 }
4753 if (rc < 0) {
4754 LOGE("Fail to request on picture channel");
4755 pthread_mutex_unlock(&mMutex);
4756 return rc;
4757 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004758
4759 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4760 uint32_t j = 0;
4761 for (j = 0; j < streamsArray.num_streams; j++) {
4762 if (streamsArray.stream_request[j].streamID == streamId) {
4763 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4764 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4765 else
4766 streamsArray.stream_request[j].buf_index = indexUsed;
4767 break;
4768 }
4769 }
4770 if (j == streamsArray.num_streams) {
4771 LOGE("Did not find matching stream to update index");
4772 assert(0);
4773 }
4774
Thierry Strudel3d639192016-09-09 11:52:26 -07004775 pendingBufferIter->need_metadata = true;
4776 streams_need_metadata++;
4777 }
4778 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4779 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004780 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4781 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004782 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
4783 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004784 if (rc < 0) {
4785 LOGE("Fail to request on YUV channel");
4786 pthread_mutex_unlock(&mMutex);
4787 return rc;
4788 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004789
4790 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4791 uint32_t j = 0;
4792 for (j = 0; j < streamsArray.num_streams; j++) {
4793 if (streamsArray.stream_request[j].streamID == streamId) {
4794 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4795 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4796 else
4797 streamsArray.stream_request[j].buf_index = indexUsed;
4798 break;
4799 }
4800 }
4801 if (j == streamsArray.num_streams) {
4802 LOGE("Did not find matching stream to update index");
4803 assert(0);
4804 }
4805
Thierry Strudel3d639192016-09-09 11:52:26 -07004806 pendingBufferIter->need_metadata = needMetadata;
4807 if (needMetadata)
4808 streams_need_metadata += 1;
4809 LOGD("calling YUV channel request, need_metadata is %d",
4810 needMetadata);
4811 } else {
4812 LOGD("request with buffer %p, frame_number %d",
4813 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004814
4815 rc = channel->request(output.buffer, frameNumber, indexUsed);
4816
4817 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4818 uint32_t j = 0;
4819 for (j = 0; j < streamsArray.num_streams; j++) {
4820 if (streamsArray.stream_request[j].streamID == streamId) {
4821 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4822 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4823 else
4824 streamsArray.stream_request[j].buf_index = indexUsed;
4825 break;
4826 }
4827 }
4828 if (j == streamsArray.num_streams) {
4829 LOGE("Did not find matching stream to update index");
4830 assert(0);
4831 }
4832
Thierry Strudel3d639192016-09-09 11:52:26 -07004833 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4834 && mBatchSize) {
4835 mToBeQueuedVidBufs++;
4836 if (mToBeQueuedVidBufs == mBatchSize) {
4837 channel->queueBatchBuf();
4838 }
4839 }
4840 if (rc < 0) {
4841 LOGE("request failed");
4842 pthread_mutex_unlock(&mMutex);
4843 return rc;
4844 }
4845 }
4846 pendingBufferIter++;
4847 }
4848
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004849 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4850 itr++) {
4851 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4852
4853 if (channel == NULL) {
4854 LOGE("invalid channel pointer for stream");
4855 assert(0);
4856 return BAD_VALUE;
4857 }
4858
4859 InternalRequest requestedStream;
4860 requestedStream = (*itr);
4861
4862
4863 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
4864 LOGD("snapshot request internally input buffer %p, frame_number %d",
4865 request->input_buffer, frameNumber);
4866 if(request->input_buffer != NULL){
4867 rc = channel->request(NULL, frameNumber,
4868 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
4869 if (rc < 0) {
4870 LOGE("Fail to request on picture channel");
4871 pthread_mutex_unlock(&mMutex);
4872 return rc;
4873 }
4874 } else {
4875 LOGD("snapshot request with frame_number %d", frameNumber);
4876 if (!request->settings) {
4877 rc = channel->request(NULL, frameNumber,
4878 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
4879 } else {
4880 rc = channel->request(NULL, frameNumber,
4881 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
4882 }
4883 if (rc < 0) {
4884 LOGE("Fail to request on picture channel");
4885 pthread_mutex_unlock(&mMutex);
4886 return rc;
4887 }
4888
4889 if ((*itr).meteringOnly != 1) {
4890 requestedStream.need_metadata = 1;
4891 streams_need_metadata++;
4892 }
4893 }
4894
4895 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4896 uint32_t j = 0;
4897 for (j = 0; j < streamsArray.num_streams; j++) {
4898 if (streamsArray.stream_request[j].streamID == streamId) {
4899 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4900 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4901 else
4902 streamsArray.stream_request[j].buf_index = indexUsed;
4903 break;
4904 }
4905 }
4906 if (j == streamsArray.num_streams) {
4907 LOGE("Did not find matching stream to update index");
4908 assert(0);
4909 }
4910
4911 } else {
4912 LOGE("Internal requests not supported on this stream type");
4913 assert(0);
4914 return INVALID_OPERATION;
4915 }
4916 latestRequest->internalRequestList.push_back(requestedStream);
4917 }
4918
Thierry Strudel3d639192016-09-09 11:52:26 -07004919 //If 2 streams have need_metadata set to true, fail the request, unless
4920 //we copy/reference count the metadata buffer
4921 if (streams_need_metadata > 1) {
4922 LOGE("not supporting request in which two streams requires"
4923 " 2 HAL metadata for reprocessing");
4924 pthread_mutex_unlock(&mMutex);
4925 return -EINVAL;
4926 }
4927
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 /* Set the parameters to backend:
4930 * - For every request in NORMAL MODE
4931 * - For every request in HFR mode during preview only case
4932 * - Once every batch in HFR mode during video recording
4933 */
4934 if (!mBatchSize ||
4935 (mBatchSize && !isVidBufRequested) ||
4936 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4937 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4938 mBatchSize, isVidBufRequested,
4939 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004940
4941 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4942 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4943 uint32_t m = 0;
4944 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4945 if (streamsArray.stream_request[k].streamID ==
4946 mBatchedStreamsArray.stream_request[m].streamID)
4947 break;
4948 }
4949 if (m == mBatchedStreamsArray.num_streams) {
4950 mBatchedStreamsArray.stream_request\
4951 [mBatchedStreamsArray.num_streams].streamID =
4952 streamsArray.stream_request[k].streamID;
4953 mBatchedStreamsArray.stream_request\
4954 [mBatchedStreamsArray.num_streams].buf_index =
4955 streamsArray.stream_request[k].buf_index;
4956 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4957 }
4958 }
4959 streamsArray = mBatchedStreamsArray;
4960 }
4961 /* Update stream id of all the requested buffers */
4962 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4963 LOGE("Failed to set stream type mask in the parameters");
4964 return BAD_VALUE;
4965 }
4966
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4968 mParameters);
4969 if (rc < 0) {
4970 LOGE("set_parms failed");
4971 }
4972 /* reset to zero coz, the batch is queued */
4973 mToBeQueuedVidBufs = 0;
4974 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004975 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4976 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
4977 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4978 uint32_t m = 0;
4979 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4980 if (streamsArray.stream_request[k].streamID ==
4981 mBatchedStreamsArray.stream_request[m].streamID)
4982 break;
4983 }
4984 if (m == mBatchedStreamsArray.num_streams) {
4985 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
4986 streamsArray.stream_request[k].streamID;
4987 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
4988 streamsArray.stream_request[k].buf_index;
4989 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4990 }
4991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 }
4993 mPendingLiveRequest++;
4994 }
4995
4996 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
4997
4998 mState = STARTED;
4999 // Added a timed condition wait
5000 struct timespec ts;
5001 uint8_t isValidTimeout = 1;
5002 rc = clock_gettime(CLOCK_REALTIME, &ts);
5003 if (rc < 0) {
5004 isValidTimeout = 0;
5005 LOGE("Error reading the real time clock!!");
5006 }
5007 else {
5008 // Make timeout as 5 sec for request to be honored
5009 ts.tv_sec += 5;
5010 }
5011 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005012 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005013 (mState != ERROR) && (mState != DEINIT)) {
5014 if (!isValidTimeout) {
5015 LOGD("Blocking on conditional wait");
5016 pthread_cond_wait(&mRequestCond, &mMutex);
5017 }
5018 else {
5019 LOGD("Blocking on timed conditional wait");
5020 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5021 if (rc == ETIMEDOUT) {
5022 rc = -ENODEV;
5023 LOGE("Unblocked on timeout!!!!");
5024 break;
5025 }
5026 }
5027 LOGD("Unblocked");
5028 if (mWokenUpByDaemon) {
5029 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005030 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005031 break;
5032 }
5033 }
5034 pthread_mutex_unlock(&mMutex);
5035
5036 return rc;
5037}
5038
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Dumps HAL3 state (pending requests, pending buffer map and
 *              pending frame-drop list) to the given file descriptor; also
 *              flags a debug-level update for the next request cycle.
 *
 * PARAMETERS :
 *   @fd : file descriptor the dump output is written to
 *
 * RETURN     : None
 *==========================================================================*/
5049void QCamera3HardwareInterface::dump(int fd)
5050{
5051 pthread_mutex_lock(&mMutex);
5052 dprintf(fd, "\n Camera HAL3 information Begin \n");
5053
5054 dprintf(fd, "\nNumber of pending requests: %zu \n",
5055 mPendingRequestsList.size());
5056 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5057 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5058 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5059 for(pendingRequestIterator i = mPendingRequestsList.begin();
5060 i != mPendingRequestsList.end(); i++) {
5061 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5062 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5063 i->input_buffer);
5064 }
5065 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5066 mPendingBuffersMap.get_num_overall_buffers());
5067 dprintf(fd, "-------+------------------\n");
5068 dprintf(fd, " Frame | Stream type mask \n");
5069 dprintf(fd, "-------+------------------\n");
5070 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5071 for(auto &j : req.mPendingBufferList) {
5072 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5073 dprintf(fd, " %5d | %11d \n",
5074 req.frame_number, channel->getStreamTypeMask());
5075 }
5076 }
5077 dprintf(fd, "-------+------------------\n");
5078
5079 dprintf(fd, "\nPending frame drop list: %zu\n",
5080 mPendingFrameDropList.size());
5081 dprintf(fd, "-------+-----------\n");
5082 dprintf(fd, " Frame | Stream ID \n");
5083 dprintf(fd, "-------+-----------\n");
5084 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5085 i != mPendingFrameDropList.end(); i++) {
5086 dprintf(fd, " %5d | %9d \n",
5087 i->frame_number, i->stream_ID);
5088 }
5089 dprintf(fd, "-------+-----------\n");
5090
5091 dprintf(fd, "\n Camera HAL3 information End \n");
5092
5093 /* use dumpsys media.camera as trigger to send update debug level event */
5094 mUpdateDebugLevel = true;
5095 pthread_mutex_unlock(&mMutex);
5096 return;
5097}
5098
5099/*===========================================================================
5100 * FUNCTION : flush
5101 *
5102 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5103 * conditionally restarts channels
5104 *
5105 * PARAMETERS :
5106 * @ restartChannels: re-start all channels
5107 *
5108 *
5109 * RETURN :
5110 * 0 on success
5111 * Error code on failure
5112 *==========================================================================*/
5113int QCamera3HardwareInterface::flush(bool restartChannels)
5114{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005115 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_FLUSH);
Thierry Strudel3d639192016-09-09 11:52:26 -07005116 int32_t rc = NO_ERROR;
5117
5118 LOGD("Unblocking Process Capture Request");
5119 pthread_mutex_lock(&mMutex);
5120 mFlush = true;
5121 pthread_mutex_unlock(&mMutex);
5122
5123 rc = stopAllChannels();
5124 // unlink of dualcam
5125 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005126 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5127 &m_pDualCamCmdPtr->bundle_info;
5128 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5130 pthread_mutex_lock(&gCamLock);
5131
5132 if (mIsMainCamera == 1) {
5133 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5134 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005135 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005136 // related session id should be session id of linked session
5137 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5138 } else {
5139 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5140 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005141 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005142 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5143 }
5144 pthread_mutex_unlock(&gCamLock);
5145
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005146 rc = mCameraHandle->ops->set_dual_cam_cmd(
5147 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005148 if (rc < 0) {
5149 LOGE("Dualcam: Unlink failed, but still proceed to close");
5150 }
5151 }
5152
5153 if (rc < 0) {
5154 LOGE("stopAllChannels failed");
5155 return rc;
5156 }
5157 if (mChannelHandle) {
5158 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5159 mChannelHandle);
5160 }
5161
5162 // Reset bundle info
5163 rc = setBundleInfo();
5164 if (rc < 0) {
5165 LOGE("setBundleInfo failed %d", rc);
5166 return rc;
5167 }
5168
5169 // Mutex Lock
5170 pthread_mutex_lock(&mMutex);
5171
5172 // Unblock process_capture_request
5173 mPendingLiveRequest = 0;
5174 pthread_cond_signal(&mRequestCond);
5175
5176 rc = notifyErrorForPendingRequests();
5177 if (rc < 0) {
5178 LOGE("notifyErrorForPendingRequests failed");
5179 pthread_mutex_unlock(&mMutex);
5180 return rc;
5181 }
5182
5183 mFlush = false;
5184
5185 // Start the Streams/Channels
5186 if (restartChannels) {
5187 rc = startAllChannels();
5188 if (rc < 0) {
5189 LOGE("startAllChannels failed");
5190 pthread_mutex_unlock(&mMutex);
5191 return rc;
5192 }
5193 }
5194
5195 if (mChannelHandle) {
5196 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5197 mChannelHandle);
5198 if (rc < 0) {
5199 LOGE("start_channel failed");
5200 pthread_mutex_unlock(&mMutex);
5201 return rc;
5202 }
5203 }
5204
5205 pthread_mutex_unlock(&mMutex);
5206
5207 return 0;
5208}
5209
5210/*===========================================================================
5211 * FUNCTION : flushPerf
5212 *
5213 * DESCRIPTION: This is the performance optimization version of flush that does
5214 * not use stream off, rather flushes the system
5215 *
5216 * PARAMETERS :
5217 *
5218 *
5219 * RETURN : 0 : success
5220 * -EINVAL: input is malformed (device is not valid)
5221 * -ENODEV: if the device has encountered a serious error
5222 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_FLUSH_PREF);
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the number of outstanding buffers; presumably the
    // buffer-return path decrements this and signals mBuffersCond when it
    // reaches zero — TODO(review): confirm against the result handlers.
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
            mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        // Backend flush IOCTL failed: report a serious device error.
        LOGE("Error in flush: IOCTL failure");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding; flush completes immediately.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // If the real-time clock cannot be read, fall back to an untimed wait.
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timed_wait = true;
    }

    //Block on conditional variable
    // mMutex is released while waiting, letting the callback paths run and
    // drain the pending buffers. Any wait error (including ETIMEDOUT)
    // leaves rc non-zero and breaks out of the loop.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out before all buffers returned.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
5323
5324/*===========================================================================
5325 * FUNCTION : handleCameraDeviceError
5326 *
5327 * DESCRIPTION: This function calls internal flush and notifies the error to
5328 * framework and updates the state variable.
5329 *
5330 * PARAMETERS : None
5331 *
5332 * RETURN : NO_ERROR on Success
5333 * Error code on failure
5334 *==========================================================================*/
5335int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5336{
5337 int32_t rc = NO_ERROR;
5338
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005339 {
5340 Mutex::Autolock lock(mFlushLock);
5341 pthread_mutex_lock(&mMutex);
5342 if (mState != ERROR) {
5343 //if mState != ERROR, nothing to be done
5344 pthread_mutex_unlock(&mMutex);
5345 return NO_ERROR;
5346 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005348
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005349 rc = flush(false /* restart channels */);
5350 if (NO_ERROR != rc) {
5351 LOGE("internal flush to handle mState = ERROR failed");
5352 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005353
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005354 pthread_mutex_lock(&mMutex);
5355 mState = DEINIT;
5356 pthread_mutex_unlock(&mMutex);
5357 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005358
5359 camera3_notify_msg_t notify_msg;
5360 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5361 notify_msg.type = CAMERA3_MSG_ERROR;
5362 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5363 notify_msg.message.error.error_stream = NULL;
5364 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005365 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005366
5367 return rc;
5368}
5369
5370/*===========================================================================
5371 * FUNCTION : captureResultCb
5372 *
5373 * DESCRIPTION: Callback handler for all capture result
5374 * (streams, as well as metadata)
5375 *
5376 * PARAMETERS :
5377 * @metadata : metadata information
5378 * @buffer : actual gralloc buffer to be returned to frameworks.
5379 * NULL if metadata.
5380 *
5381 * RETURN : NONE
5382 *==========================================================================*/
5383void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5384 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5385{
5386 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005387 pthread_mutex_lock(&mMutex);
5388 uint8_t batchSize = mBatchSize;
5389 pthread_mutex_unlock(&mMutex);
5390 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005391 handleBatchMetadata(metadata_buf,
5392 true /* free_and_bufdone_meta_buf */);
5393 } else { /* mBatchSize = 0 */
5394 hdrPlusPerfLock(metadata_buf);
5395 pthread_mutex_lock(&mMutex);
5396 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005397 true /* free_and_bufdone_meta_buf */,
5398 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005399 pthread_mutex_unlock(&mMutex);
5400 }
5401 } else if (isInputBuffer) {
5402 pthread_mutex_lock(&mMutex);
5403 handleInputBufferWithLock(frame_number);
5404 pthread_mutex_unlock(&mMutex);
5405 } else {
5406 pthread_mutex_lock(&mMutex);
5407 handleBufferWithLock(buffer, frame_number);
5408 pthread_mutex_unlock(&mMutex);
5409 }
5410 return;
5411}
5412
5413/*===========================================================================
5414 * FUNCTION : getReprocessibleOutputStreamId
5415 *
5416 * DESCRIPTION: Get source output stream id for the input reprocess stream
5417 * based on size and format, which would be the largest
5418 * output stream if an input stream exists.
5419 *
5420 * PARAMETERS :
5421 * @id : return the stream id if found
5422 *
5423 * RETURN : int32_t type of status
5424 * NO_ERROR -- success
5425 * none-zero failure code
5426 *==========================================================================*/
5427int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5428{
5429 /* check if any output or bidirectional stream with the same size and format
5430 and return that stream */
5431 if ((mInputStreamInfo.dim.width > 0) &&
5432 (mInputStreamInfo.dim.height > 0)) {
5433 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5434 it != mStreamInfo.end(); it++) {
5435
5436 camera3_stream_t *stream = (*it)->stream;
5437 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5438 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5439 (stream->format == mInputStreamInfo.format)) {
5440 // Usage flag for an input stream and the source output stream
5441 // may be different.
5442 LOGD("Found reprocessible output stream! %p", *it);
5443 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5444 stream->usage, mInputStreamInfo.usage);
5445
5446 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5447 if (channel != NULL && channel->mStreams[0]) {
5448 id = channel->mStreams[0]->getMyServerID();
5449 return NO_ERROR;
5450 }
5451 }
5452 }
5453 } else {
5454 LOGD("No input stream, so no reprocessible output stream");
5455 }
5456 return NAME_NOT_FOUND;
5457}
5458
5459/*===========================================================================
5460 * FUNCTION : lookupFwkName
5461 *
5462 * DESCRIPTION: In case the enum is not same in fwk and backend
5463 * make sure the parameter is correctly propogated
5464 *
5465 * PARAMETERS :
5466 * @arr : map between the two enums
5467 * @len : len of the map
5468 * @hal_name : name of the hal_parm to map
5469 *
5470 * RETURN : int type of status
5471 * fwk_name -- success
5472 * none-zero failure code
5473 *==========================================================================*/
5474template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5475 size_t len, halType hal_name)
5476{
5477
5478 for (size_t i = 0; i < len; i++) {
5479 if (arr[i].hal_name == hal_name) {
5480 return arr[i].fwk_name;
5481 }
5482 }
5483
5484 /* Not able to find matching framework type is not necessarily
5485 * an error case. This happens when mm-camera supports more attributes
5486 * than the frameworks do */
5487 LOGH("Cannot find matching framework type");
5488 return NAME_NOT_FOUND;
5489}
5490
5491/*===========================================================================
5492 * FUNCTION : lookupHalName
5493 *
5494 * DESCRIPTION: In case the enum is not same in fwk and backend
5495 * make sure the parameter is correctly propogated
5496 *
5497 * PARAMETERS :
5498 * @arr : map between the two enums
5499 * @len : len of the map
5500 * @fwk_name : name of the hal_parm to map
5501 *
5502 * RETURN : int32_t type of status
5503 * hal_name -- success
5504 * none-zero failure code
5505 *==========================================================================*/
5506template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5507 size_t len, fwkType fwk_name)
5508{
5509 for (size_t i = 0; i < len; i++) {
5510 if (arr[i].fwk_name == fwk_name) {
5511 return arr[i].hal_name;
5512 }
5513 }
5514
5515 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5516 return NAME_NOT_FOUND;
5517}
5518
5519/*===========================================================================
5520 * FUNCTION : lookupProp
5521 *
5522 * DESCRIPTION: lookup a value by its name
5523 *
5524 * PARAMETERS :
5525 * @arr : map between the two enums
5526 * @len : size of the map
5527 * @name : name to be looked up
5528 *
5529 * RETURN : Value if found
5530 * CAM_CDS_MODE_MAX if not found
5531 *==========================================================================*/
5532template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5533 size_t len, const char *name)
5534{
5535 if (name) {
5536 for (size_t i = 0; i < len; i++) {
5537 if (!strcmp(arr[i].desc, name)) {
5538 return arr[i].val;
5539 }
5540 }
5541 }
5542 return CAM_CDS_MODE_MAX;
5543}
5544
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translates metadata received from the HAL/backend into the
 *              camera_metadata_t format expected by the framework.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *   @timestamp: metadata buffer timestamp
 *   @request_id: request id
 *   @jpegMetadata: additional jpeg metadata
 *   @pipeline_depth: pipeline depth reported for this result
 *   @capture_intent: capture intent reported for this result
 *   @pprocDone: whether internal offline postprocessing is done
 *   @fwk_cacMode: framework CAC mode reported for this result
 *   @firstMetadataInBatch: whether this is the first metadata in an HFR batch
 *
 * RETURN     : camera_metadata_t*
 *                  metadata in a format specified by fwk
 *==========================================================================*/
5559camera_metadata_t*
5560QCamera3HardwareInterface::translateFromHalMetadata(
5561 metadata_buffer_t *metadata,
5562 nsecs_t timestamp,
5563 int32_t request_id,
5564 const CameraMetadata& jpegMetadata,
5565 uint8_t pipeline_depth,
5566 uint8_t capture_intent,
5567 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005568 uint8_t fwk_cacMode,
5569 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005570{
5571 CameraMetadata camMetadata;
5572 camera_metadata_t *resultMetadata;
5573
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005574 if (mBatchSize && !firstMetadataInBatch) {
5575 /* In batch mode, use cached metadata from the first metadata
5576 in the batch */
5577 camMetadata.clear();
5578 camMetadata = mCachedMetadata;
5579 }
5580
Thierry Strudel3d639192016-09-09 11:52:26 -07005581 if (jpegMetadata.entryCount())
5582 camMetadata.append(jpegMetadata);
5583
5584 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5585 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5586 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5587 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
5588
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005589 if (mBatchSize && !firstMetadataInBatch) {
5590 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5591 resultMetadata = camMetadata.release();
5592 return resultMetadata;
5593 }
5594
Thierry Strudel3d639192016-09-09 11:52:26 -07005595 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5596 int64_t fwk_frame_number = *frame_number;
5597 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5598 }
5599
5600 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5601 int32_t fps_range[2];
5602 fps_range[0] = (int32_t)float_range->min_fps;
5603 fps_range[1] = (int32_t)float_range->max_fps;
5604 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5605 fps_range, 2);
5606 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5607 fps_range[0], fps_range[1]);
5608 }
5609
5610 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5611 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5612 }
5613
5614 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5615 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5616 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5617 *sceneMode);
5618 if (NAME_NOT_FOUND != val) {
5619 uint8_t fwkSceneMode = (uint8_t)val;
5620 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5621 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5622 fwkSceneMode);
5623 }
5624 }
5625
5626 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5627 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5628 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5629 }
5630
5631 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5632 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5633 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5634 }
5635
5636 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5637 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5638 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5639 }
5640
5641 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5642 CAM_INTF_META_EDGE_MODE, metadata) {
5643 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5644 }
5645
5646 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5647 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5648 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5649 }
5650
5651 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5652 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5653 }
5654
5655 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5656 if (0 <= *flashState) {
5657 uint8_t fwk_flashState = (uint8_t) *flashState;
5658 if (!gCamCapability[mCameraId]->flash_available) {
5659 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5660 }
5661 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5662 }
5663 }
5664
5665 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5666 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5667 if (NAME_NOT_FOUND != val) {
5668 uint8_t fwk_flashMode = (uint8_t)val;
5669 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5670 }
5671 }
5672
5673 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5674 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5675 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5676 }
5677
5678 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5679 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5680 }
5681
5682 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5683 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5684 }
5685
5686 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5687 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5688 }
5689
5690 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5691 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5692 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5693 }
5694
5695 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5696 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5697 LOGD("fwk_videoStab = %d", fwk_videoStab);
5698 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5699 } else {
5700 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5701 // and so hardcoding the Video Stab result to OFF mode.
5702 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5703 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005704 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005705 }
5706
5707 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5708 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5709 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5710 }
5711
5712 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5713 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5714 }
5715
5716 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
5717 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
5718
5719 LOGD("dynamicblackLevel = %f %f %f %f",
5720 blackLevelSourcePattern->cam_black_level[0],
5721 blackLevelSourcePattern->cam_black_level[1],
5722 blackLevelSourcePattern->cam_black_level[2],
5723 blackLevelSourcePattern->cam_black_level[3]);
5724 }
5725
5726 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5727 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
5728 float fwk_blackLevelInd[4];
5729
5730 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
5731 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
5732 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
5733 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
5734
5735 LOGD("applied dynamicblackLevel = %f %f %f %f",
5736 blackLevelAppliedPattern->cam_black_level[0],
5737 blackLevelAppliedPattern->cam_black_level[1],
5738 blackLevelAppliedPattern->cam_black_level[2],
5739 blackLevelAppliedPattern->cam_black_level[3]);
5740 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005741
5742#ifndef USE_HAL_3_3
5743 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
5744 // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
5745 // depth space.
5746 fwk_blackLevelInd[0] /= 64.0;
5747 fwk_blackLevelInd[1] /= 64.0;
5748 fwk_blackLevelInd[2] /= 64.0;
5749 fwk_blackLevelInd[3] /= 64.0;
5750 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
5751#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07005752 }
5753
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005754#ifndef USE_HAL_3_3
5755 // Fixed whitelevel is used by ISP/Sensor
5756 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5757 &gCamCapability[mCameraId]->white_level, 1);
5758#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07005759
5760 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5761 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5762 int32_t scalerCropRegion[4];
5763 scalerCropRegion[0] = hScalerCropRegion->left;
5764 scalerCropRegion[1] = hScalerCropRegion->top;
5765 scalerCropRegion[2] = hScalerCropRegion->width;
5766 scalerCropRegion[3] = hScalerCropRegion->height;
5767
5768 // Adjust crop region from sensor output coordinate system to active
5769 // array coordinate system.
5770 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5771 scalerCropRegion[2], scalerCropRegion[3]);
5772
5773 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5774 }
5775
5776 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5777 LOGD("sensorExpTime = %lld", *sensorExpTime);
5778 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5779 }
5780
5781 IF_META_AVAILABLE(int64_t, sensorFameDuration,
5782 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5783 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5784 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5785 }
5786
5787 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5788 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5789 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5790 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5791 sensorRollingShutterSkew, 1);
5792 }
5793
5794 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5795 LOGD("sensorSensitivity = %d", *sensorSensitivity);
5796 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5797
5798 //calculate the noise profile based on sensitivity
5799 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5800 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5801 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5802 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5803 noise_profile[i] = noise_profile_S;
5804 noise_profile[i+1] = noise_profile_O;
5805 }
5806 LOGD("noise model entry (S, O) is (%f, %f)",
5807 noise_profile_S, noise_profile_O);
5808 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5809 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5810 }
5811
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005812#ifndef USE_HAL_3_3
5813 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5814 int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
5815 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5816 }
5817#endif
5818
Thierry Strudel3d639192016-09-09 11:52:26 -07005819 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5820 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5821 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5822 }
5823
5824 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5825 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5826 *faceDetectMode);
5827 if (NAME_NOT_FOUND != val) {
5828 uint8_t fwk_faceDetectMode = (uint8_t)val;
5829 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5830
5831 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5832 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5833 CAM_INTF_META_FACE_DETECTION, metadata) {
5834 uint8_t numFaces = MIN(
5835 faceDetectionInfo->num_faces_detected, MAX_ROI);
5836 int32_t faceIds[MAX_ROI];
5837 uint8_t faceScores[MAX_ROI];
5838 int32_t faceRectangles[MAX_ROI * 4];
5839 int32_t faceLandmarks[MAX_ROI * 6];
5840 size_t j = 0, k = 0;
5841
5842 for (size_t i = 0; i < numFaces; i++) {
5843 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5844 // Adjust crop region from sensor output coordinate system to active
5845 // array coordinate system.
5846 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5847 mCropRegionMapper.toActiveArray(rect.left, rect.top,
5848 rect.width, rect.height);
5849
5850 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5851 faceRectangles+j, -1);
5852
5853 j+= 4;
5854 }
5855 if (numFaces <= 0) {
5856 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5857 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5858 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5859 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5860 }
5861
5862 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5863 numFaces);
5864 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5865 faceRectangles, numFaces * 4U);
5866 if (fwk_faceDetectMode ==
5867 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5868 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5869 CAM_INTF_META_FACE_LANDMARK, metadata) {
5870
5871 for (size_t i = 0; i < numFaces; i++) {
5872 // Map the co-ordinate sensor output coordinate system to active
5873 // array coordinate system.
5874 mCropRegionMapper.toActiveArray(
5875 landmarks->face_landmarks[i].left_eye_center.x,
5876 landmarks->face_landmarks[i].left_eye_center.y);
5877 mCropRegionMapper.toActiveArray(
5878 landmarks->face_landmarks[i].right_eye_center.x,
5879 landmarks->face_landmarks[i].right_eye_center.y);
5880 mCropRegionMapper.toActiveArray(
5881 landmarks->face_landmarks[i].mouth_center.x,
5882 landmarks->face_landmarks[i].mouth_center.y);
5883
5884 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07005885 k+= TOTAL_LANDMARK_INDICES;
5886 }
5887 } else {
5888 for (size_t i = 0; i < numFaces; i++) {
5889 setInvalidLandmarks(faceLandmarks+k);
5890 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07005891 }
5892 }
5893
5894 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5895 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5896 faceLandmarks, numFaces * 6U);
5897 }
5898 }
5899 }
5900 }
5901 }
5902
5903 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5904 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5905 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005906
5907 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
5908 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
5909 // process histogram statistics info
5910 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
5911 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
5912 cam_histogram_data_t rHistData, gHistData, bHistData;
5913 memset(&rHistData, 0, sizeof(rHistData));
5914 memset(&gHistData, 0, sizeof(gHistData));
5915 memset(&bHistData, 0, sizeof(bHistData));
5916
5917 switch (stats_data->type) {
5918 case CAM_HISTOGRAM_TYPE_BAYER:
5919 switch (stats_data->bayer_stats.data_type) {
5920 case CAM_STATS_CHANNEL_GR:
5921 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
5922 break;
5923 case CAM_STATS_CHANNEL_GB:
5924 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
5925 break;
5926 case CAM_STATS_CHANNEL_B:
5927 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
5928 break;
5929 case CAM_STATS_CHANNEL_ALL:
5930 rHistData = stats_data->bayer_stats.r_stats;
5931 //Framework expects only 3 channels. So, for now,
5932 //use gb stats for G channel.
5933 gHistData = stats_data->bayer_stats.gb_stats;
5934 bHistData = stats_data->bayer_stats.b_stats;
5935 break;
5936 case CAM_STATS_CHANNEL_Y:
5937 case CAM_STATS_CHANNEL_R:
5938 default:
5939 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
5940 break;
5941 }
5942 break;
5943 case CAM_HISTOGRAM_TYPE_YUV:
5944 rHistData = gHistData = bHistData = stats_data->yuv_stats;
5945 break;
5946 }
5947
5948 memcpy(hist_buf, rHistData.hist_buf, hist_size);
5949 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
5950 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
5951
5952 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
5953 }
5954 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005955 }
5956
5957 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5958 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5959 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5960 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5961 }
5962
5963 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5964 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5965 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5966 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5967 }
5968
5969 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5970 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5971 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5972 CAM_MAX_SHADING_MAP_HEIGHT);
5973 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5974 CAM_MAX_SHADING_MAP_WIDTH);
5975 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5976 lensShadingMap->lens_shading, 4U * map_width * map_height);
5977 }
5978
5979 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5980 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5981 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5982 }
5983
5984 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5985 //Populate CAM_INTF_META_TONEMAP_CURVES
5986 /* ch0 = G, ch 1 = B, ch 2 = R*/
5987 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5988 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5989 tonemap->tonemap_points_cnt,
5990 CAM_MAX_TONEMAP_CURVE_SIZE);
5991 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5992 }
5993
5994 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5995 &tonemap->curves[0].tonemap_points[0][0],
5996 tonemap->tonemap_points_cnt * 2);
5997
5998 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5999 &tonemap->curves[1].tonemap_points[0][0],
6000 tonemap->tonemap_points_cnt * 2);
6001
6002 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6003 &tonemap->curves[2].tonemap_points[0][0],
6004 tonemap->tonemap_points_cnt * 2);
6005 }
6006
6007 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6008 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6009 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6010 CC_GAIN_MAX);
6011 }
6012
6013 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6014 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6015 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6016 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6017 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6018 }
6019
6020 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6021 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6022 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6023 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6024 toneCurve->tonemap_points_cnt,
6025 CAM_MAX_TONEMAP_CURVE_SIZE);
6026 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6027 }
6028 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6029 (float*)toneCurve->curve.tonemap_points,
6030 toneCurve->tonemap_points_cnt * 2);
6031 }
6032
6033 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6034 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6035 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6036 predColorCorrectionGains->gains, 4);
6037 }
6038
6039 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6040 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6041 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6042 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6043 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6044 }
6045
6046 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6047 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6048 }
6049
6050 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6051 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6052 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6053 }
6054
6055 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6056 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6057 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6058 }
6059
6060 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6061 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6062 *effectMode);
6063 if (NAME_NOT_FOUND != val) {
6064 uint8_t fwk_effectMode = (uint8_t)val;
6065 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6066 }
6067 }
6068
6069 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6070 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6071 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6072 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6073 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6074 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6075 }
6076 int32_t fwk_testPatternData[4];
6077 fwk_testPatternData[0] = testPatternData->r;
6078 fwk_testPatternData[3] = testPatternData->b;
6079 switch (gCamCapability[mCameraId]->color_arrangement) {
6080 case CAM_FILTER_ARRANGEMENT_RGGB:
6081 case CAM_FILTER_ARRANGEMENT_GRBG:
6082 fwk_testPatternData[1] = testPatternData->gr;
6083 fwk_testPatternData[2] = testPatternData->gb;
6084 break;
6085 case CAM_FILTER_ARRANGEMENT_GBRG:
6086 case CAM_FILTER_ARRANGEMENT_BGGR:
6087 fwk_testPatternData[2] = testPatternData->gr;
6088 fwk_testPatternData[1] = testPatternData->gb;
6089 break;
6090 default:
6091 LOGE("color arrangement %d is not supported",
6092 gCamCapability[mCameraId]->color_arrangement);
6093 break;
6094 }
6095 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6096 }
6097
6098 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6099 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6100 }
6101
6102 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6103 String8 str((const char *)gps_methods);
6104 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6105 }
6106
6107 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6108 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6109 }
6110
6111 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6112 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6113 }
6114
6115 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6116 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6117 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6118 }
6119
6120 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6121 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6122 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6123 }
6124
6125 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6126 int32_t fwk_thumb_size[2];
6127 fwk_thumb_size[0] = thumb_size->width;
6128 fwk_thumb_size[1] = thumb_size->height;
6129 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6130 }
6131
6132 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6133 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6134 privateData,
6135 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6136 }
6137
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006138 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6139 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6140 meteringMode, 1);
6141 }
6142
Thierry Strudel3d639192016-09-09 11:52:26 -07006143 if (metadata->is_tuning_params_valid) {
6144 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6145 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6146 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6147
6148
6149 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6150 sizeof(uint32_t));
6151 data += sizeof(uint32_t);
6152
6153 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6154 sizeof(uint32_t));
6155 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6156 data += sizeof(uint32_t);
6157
6158 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6159 sizeof(uint32_t));
6160 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6161 data += sizeof(uint32_t);
6162
6163 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6164 sizeof(uint32_t));
6165 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6166 data += sizeof(uint32_t);
6167
6168 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6169 sizeof(uint32_t));
6170 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6171 data += sizeof(uint32_t);
6172
6173 metadata->tuning_params.tuning_mod3_data_size = 0;
6174 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6175 sizeof(uint32_t));
6176 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6177 data += sizeof(uint32_t);
6178
6179 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6180 TUNING_SENSOR_DATA_MAX);
6181 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6182 count);
6183 data += count;
6184
6185 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6186 TUNING_VFE_DATA_MAX);
6187 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6188 count);
6189 data += count;
6190
6191 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6192 TUNING_CPP_DATA_MAX);
6193 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6194 count);
6195 data += count;
6196
6197 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6198 TUNING_CAC_DATA_MAX);
6199 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6200 count);
6201 data += count;
6202
6203 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6204 (int32_t *)(void *)tuning_meta_data_blob,
6205 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6206 }
6207
6208 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6209 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6210 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6211 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6212 NEUTRAL_COL_POINTS);
6213 }
6214
6215 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6216 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6217 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6218 }
6219
6220 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6221 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6222 // Adjust crop region from sensor output coordinate system to active
6223 // array coordinate system.
6224 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6225 hAeRegions->rect.width, hAeRegions->rect.height);
6226
6227 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6228 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6229 REGIONS_TUPLE_COUNT);
6230 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6231 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6232 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6233 hAeRegions->rect.height);
6234 }
6235
6236 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6237 uint8_t fwk_afState = (uint8_t) *afState;
6238 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
6239 LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
6240 }
6241
6242 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6243 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6244 }
6245
6246 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6247 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6248 }
6249
6250 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6251 uint8_t fwk_lensState = *lensState;
6252 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6253 }
6254
6255 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6256 /*af regions*/
6257 int32_t afRegions[REGIONS_TUPLE_COUNT];
6258 // Adjust crop region from sensor output coordinate system to active
6259 // array coordinate system.
6260 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6261 hAfRegions->rect.width, hAfRegions->rect.height);
6262
6263 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6264 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6265 REGIONS_TUPLE_COUNT);
6266 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6267 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6268 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6269 hAfRegions->rect.height);
6270 }
6271
6272 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
6273 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
6274 *hal_ab_mode);
6275 if (NAME_NOT_FOUND != val) {
6276 uint8_t fwk_ab_mode = (uint8_t)val;
6277 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6278 }
6279 }
6280
6281 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6282 int val = lookupFwkName(SCENE_MODES_MAP,
6283 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6284 if (NAME_NOT_FOUND != val) {
6285 uint8_t fwkBestshotMode = (uint8_t)val;
6286 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6287 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6288 } else {
6289 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6290 }
6291 }
6292
6293 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6294 uint8_t fwk_mode = (uint8_t) *mode;
6295 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6296 }
6297
6298 /* Constant metadata values to be update*/
6299 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6300 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6301
6302 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6303 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6304
6305 int32_t hotPixelMap[2];
6306 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6307
6308 // CDS
6309 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6310 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6311 }
6312
Thierry Strudel04e026f2016-10-10 11:27:36 -07006313 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6314 int32_t fwk_hdr;
6315 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6316 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6317 } else {
6318 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6319 }
6320 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6321 }
6322
6323 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006324 int32_t fwk_ir = (int32_t) *ir;
6325 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006326 }
6327
Thierry Strudel269c81a2016-10-12 12:13:59 -07006328 // AEC SPEED
6329 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6330 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6331 }
6332
6333 // AWB SPEED
6334 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6335 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6336 }
6337
Thierry Strudel3d639192016-09-09 11:52:26 -07006338 // TNR
6339 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6340 uint8_t tnr_enable = tnr->denoise_enable;
6341 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6342
6343 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6344 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6345 }
6346
6347 // Reprocess crop data
6348 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6349 uint8_t cnt = crop_data->num_of_streams;
6350 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6351 // mm-qcamera-daemon only posts crop_data for streams
6352 // not linked to pproc. So no valid crop metadata is not
6353 // necessarily an error case.
6354 LOGD("No valid crop metadata entries");
6355 } else {
6356 uint32_t reproc_stream_id;
6357 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6358 LOGD("No reprocessible stream found, ignore crop data");
6359 } else {
6360 int rc = NO_ERROR;
6361 Vector<int32_t> roi_map;
6362 int32_t *crop = new int32_t[cnt*4];
6363 if (NULL == crop) {
6364 rc = NO_MEMORY;
6365 }
6366 if (NO_ERROR == rc) {
6367 int32_t streams_found = 0;
6368 for (size_t i = 0; i < cnt; i++) {
6369 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6370 if (pprocDone) {
6371 // HAL already does internal reprocessing,
6372 // either via reprocessing before JPEG encoding,
6373 // or offline postprocessing for pproc bypass case.
6374 crop[0] = 0;
6375 crop[1] = 0;
6376 crop[2] = mInputStreamInfo.dim.width;
6377 crop[3] = mInputStreamInfo.dim.height;
6378 } else {
6379 crop[0] = crop_data->crop_info[i].crop.left;
6380 crop[1] = crop_data->crop_info[i].crop.top;
6381 crop[2] = crop_data->crop_info[i].crop.width;
6382 crop[3] = crop_data->crop_info[i].crop.height;
6383 }
6384 roi_map.add(crop_data->crop_info[i].roi_map.left);
6385 roi_map.add(crop_data->crop_info[i].roi_map.top);
6386 roi_map.add(crop_data->crop_info[i].roi_map.width);
6387 roi_map.add(crop_data->crop_info[i].roi_map.height);
6388 streams_found++;
6389 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6390 crop[0], crop[1], crop[2], crop[3]);
6391 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6392 crop_data->crop_info[i].roi_map.left,
6393 crop_data->crop_info[i].roi_map.top,
6394 crop_data->crop_info[i].roi_map.width,
6395 crop_data->crop_info[i].roi_map.height);
6396 break;
6397
6398 }
6399 }
6400 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6401 &streams_found, 1);
6402 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6403 crop, (size_t)(streams_found * 4));
6404 if (roi_map.array()) {
6405 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6406 roi_map.array(), roi_map.size());
6407 }
6408 }
6409 if (crop) {
6410 delete [] crop;
6411 }
6412 }
6413 }
6414 }
6415
6416 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6417 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6418 // so hardcoding the CAC result to OFF mode.
6419 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6420 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6421 } else {
6422 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6423 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6424 *cacMode);
6425 if (NAME_NOT_FOUND != val) {
6426 uint8_t resultCacMode = (uint8_t)val;
6427 // check whether CAC result from CB is equal to Framework set CAC mode
6428 // If not equal then set the CAC mode came in corresponding request
6429 if (fwk_cacMode != resultCacMode) {
6430 resultCacMode = fwk_cacMode;
6431 }
6432 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6433 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6434 } else {
6435 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6436 }
6437 }
6438 }
6439
6440 // Post blob of cam_cds_data through vendor tag.
6441 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6442 uint8_t cnt = cdsInfo->num_of_streams;
6443 cam_cds_data_t cdsDataOverride;
6444 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6445 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6446 cdsDataOverride.num_of_streams = 1;
6447 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6448 uint32_t reproc_stream_id;
6449 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6450 LOGD("No reprocessible stream found, ignore cds data");
6451 } else {
6452 for (size_t i = 0; i < cnt; i++) {
6453 if (cdsInfo->cds_info[i].stream_id ==
6454 reproc_stream_id) {
6455 cdsDataOverride.cds_info[0].cds_enable =
6456 cdsInfo->cds_info[i].cds_enable;
6457 break;
6458 }
6459 }
6460 }
6461 } else {
6462 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6463 }
6464 camMetadata.update(QCAMERA3_CDS_INFO,
6465 (uint8_t *)&cdsDataOverride,
6466 sizeof(cam_cds_data_t));
6467 }
6468
6469 // Ldaf calibration data
6470 if (!mLdafCalibExist) {
6471 IF_META_AVAILABLE(uint32_t, ldafCalib,
6472 CAM_INTF_META_LDAF_EXIF, metadata) {
6473 mLdafCalibExist = true;
6474 mLdafCalib[0] = ldafCalib[0];
6475 mLdafCalib[1] = ldafCalib[1];
6476 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6477 ldafCalib[0], ldafCalib[1]);
6478 }
6479 }
6480
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006481 // Reprocess and DDM debug data through vendor tag
6482 cam_reprocess_info_t repro_info;
6483 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006484 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
6485 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006486 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006487 }
6488 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
6489 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006490 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006491 }
6492 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
6493 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006494 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006495 }
6496 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
6497 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006498 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006499 }
6500 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
6501 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006502 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006503 }
6504 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006505 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006506 }
6507 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
6508 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006509 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006510 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006511 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
6512 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
6513 }
6514 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
6515 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
6516 }
6517 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
6518 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006519
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006520 // INSTANT AEC MODE
6521 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
6522 CAM_INTF_PARM_INSTANT_AEC, metadata) {
6523 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
6524 }
6525
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006526 /* In batch mode, cache the first metadata in the batch */
6527 if (mBatchSize && firstMetadataInBatch) {
6528 mCachedMetadata.clear();
6529 mCachedMetadata = camMetadata;
6530 }
6531
Thierry Strudel3d639192016-09-09 11:52:26 -07006532 resultMetadata = camMetadata.release();
6533 return resultMetadata;
6534}
6535
6536/*===========================================================================
6537 * FUNCTION : saveExifParams
6538 *
6539 * DESCRIPTION:
6540 *
6541 * PARAMETERS :
6542 * @metadata : metadata information from callback
6543 *
6544 * RETURN : none
6545 *
6546 *==========================================================================*/
6547void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6548{
6549 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6550 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6551 if (mExifParams.debug_params) {
6552 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6553 mExifParams.debug_params->ae_debug_params_valid = TRUE;
6554 }
6555 }
6556 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6557 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6558 if (mExifParams.debug_params) {
6559 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6560 mExifParams.debug_params->awb_debug_params_valid = TRUE;
6561 }
6562 }
6563 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6564 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6565 if (mExifParams.debug_params) {
6566 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6567 mExifParams.debug_params->af_debug_params_valid = TRUE;
6568 }
6569 }
6570 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6571 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6572 if (mExifParams.debug_params) {
6573 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6574 mExifParams.debug_params->asd_debug_params_valid = TRUE;
6575 }
6576 }
6577 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6578 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6579 if (mExifParams.debug_params) {
6580 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6581 mExifParams.debug_params->stats_debug_params_valid = TRUE;
6582 }
6583 }
6584 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6585 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6586 if (mExifParams.debug_params) {
6587 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6588 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6589 }
6590 }
6591 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6592 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6593 if (mExifParams.debug_params) {
6594 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6595 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6596 }
6597 }
6598 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6599 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6600 if (mExifParams.debug_params) {
6601 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6602 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6603 }
6604 }
6605}
6606
6607/*===========================================================================
6608 * FUNCTION : get3AExifParams
6609 *
6610 * DESCRIPTION:
6611 *
6612 * PARAMETERS : none
6613 *
6614 *
6615 * RETURN : mm_jpeg_exif_params_t
6616 *
6617 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns a copy of the cached EXIF parameter set; the debug blobs
    // inside are populated incrementally by saveExifParams() as metadata
    // callbacks arrive.
    return mExifParams;
}
6622
6623/*===========================================================================
6624 * FUNCTION : translateCbUrgentMetadataToResultMetadata
6625 *
6626 * DESCRIPTION:
6627 *
6628 * PARAMETERS :
6629 * @metadata : metadata information from callback
6630 *
6631 * RETURN : camera_metadata_t*
6632 * metadata in a format specified by fwk
6633 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: HAL enum value is forwarded as-is (cast to uint8_t).
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are forwarded unchanged.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state: forwarded as-is.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF mode: HAL focus mode is translated through FOCUS_MODES_MAP to the
    // framework enum; unknown values are only logged, not reported.
    IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkAfMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
                    val);
        }
    }

    // AF trigger and its id are forwarded unchanged.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: translated through WHITE_BALANCE_MODES_MAP.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart; it is deduced
    // from three HAL fields checked in priority order below:
    // redeye reduction > flash (LED) mode > raw AE mode.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three fields was usable; AE mode is left unreported.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant-AEC bookkeeping: count frames until AEC settles or the skip
    // bound is hit, then clear the instant-AEC state and request a reset.
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
6756
6757/*===========================================================================
6758 * FUNCTION : dumpMetadataToFile
6759 *
6760 * DESCRIPTION: Dumps tuning metadata to file system
6761 *
6762 * PARAMETERS :
6763 * @meta : tuning metadata
6764 * @dumpFrameCount : current dump frame count
6765 * @enabled : Enable mask
6766 *
6767 *==========================================================================*/
6768void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
6769 uint32_t &dumpFrameCount,
6770 bool enabled,
6771 const char *type,
6772 uint32_t frameNumber)
6773{
6774 //Some sanity checks
6775 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
6776 LOGE("Tuning sensor data size bigger than expected %d: %d",
6777 meta.tuning_sensor_data_size,
6778 TUNING_SENSOR_DATA_MAX);
6779 return;
6780 }
6781
6782 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
6783 LOGE("Tuning VFE data size bigger than expected %d: %d",
6784 meta.tuning_vfe_data_size,
6785 TUNING_VFE_DATA_MAX);
6786 return;
6787 }
6788
6789 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
6790 LOGE("Tuning CPP data size bigger than expected %d: %d",
6791 meta.tuning_cpp_data_size,
6792 TUNING_CPP_DATA_MAX);
6793 return;
6794 }
6795
6796 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
6797 LOGE("Tuning CAC data size bigger than expected %d: %d",
6798 meta.tuning_cac_data_size,
6799 TUNING_CAC_DATA_MAX);
6800 return;
6801 }
6802 //
6803
6804 if(enabled){
6805 char timeBuf[FILENAME_MAX];
6806 char buf[FILENAME_MAX];
6807 memset(buf, 0, sizeof(buf));
6808 memset(timeBuf, 0, sizeof(timeBuf));
6809 time_t current_time;
6810 struct tm * timeinfo;
6811 time (&current_time);
6812 timeinfo = localtime (&current_time);
6813 if (timeinfo != NULL) {
6814 strftime (timeBuf, sizeof(timeBuf),
6815 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
6816 }
6817 String8 filePath(timeBuf);
6818 snprintf(buf,
6819 sizeof(buf),
6820 "%dm_%s_%d.bin",
6821 dumpFrameCount,
6822 type,
6823 frameNumber);
6824 filePath.append(buf);
6825 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
6826 if (file_fd >= 0) {
6827 ssize_t written_len = 0;
6828 meta.tuning_data_version = TUNING_DATA_VERSION;
6829 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
6830 written_len += write(file_fd, data, sizeof(uint32_t));
6831 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
6832 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6833 written_len += write(file_fd, data, sizeof(uint32_t));
6834 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
6835 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6836 written_len += write(file_fd, data, sizeof(uint32_t));
6837 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
6838 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6839 written_len += write(file_fd, data, sizeof(uint32_t));
6840 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
6841 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6842 written_len += write(file_fd, data, sizeof(uint32_t));
6843 meta.tuning_mod3_data_size = 0;
6844 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
6845 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6846 written_len += write(file_fd, data, sizeof(uint32_t));
6847 size_t total_size = meta.tuning_sensor_data_size;
6848 data = (void *)((uint8_t *)&meta.data);
6849 written_len += write(file_fd, data, total_size);
6850 total_size = meta.tuning_vfe_data_size;
6851 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
6852 written_len += write(file_fd, data, total_size);
6853 total_size = meta.tuning_cpp_data_size;
6854 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
6855 written_len += write(file_fd, data, total_size);
6856 total_size = meta.tuning_cac_data_size;
6857 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
6858 written_len += write(file_fd, data, total_size);
6859 close(file_fd);
6860 }else {
6861 LOGE("fail to open file for metadata dumping");
6862 }
6863 }
6864}
6865
6866/*===========================================================================
6867 * FUNCTION : cleanAndSortStreamInfo
6868 *
6869 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6870 * and sort them such that raw stream is at the end of the list
6871 * This is a workaround for camera daemon constraint.
6872 *
6873 * PARAMETERS : None
6874 *
6875 *==========================================================================*/
6876void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6877{
6878 List<stream_info_t *> newStreamInfo;
6879
6880 /*clean up invalid streams*/
6881 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6882 it != mStreamInfo.end();) {
6883 if(((*it)->status) == INVALID){
6884 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6885 delete channel;
6886 free(*it);
6887 it = mStreamInfo.erase(it);
6888 } else {
6889 it++;
6890 }
6891 }
6892
6893 // Move preview/video/callback/snapshot streams into newList
6894 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6895 it != mStreamInfo.end();) {
6896 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6897 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6898 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6899 newStreamInfo.push_back(*it);
6900 it = mStreamInfo.erase(it);
6901 } else
6902 it++;
6903 }
6904 // Move raw streams into newList
6905 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6906 it != mStreamInfo.end();) {
6907 newStreamInfo.push_back(*it);
6908 it = mStreamInfo.erase(it);
6909 }
6910
6911 mStreamInfo = newStreamInfo;
6912}
6913
6914/*===========================================================================
6915 * FUNCTION : extractJpegMetadata
6916 *
6917 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6918 * JPEG metadata is cached in HAL, and return as part of capture
6919 * result when metadata is returned from camera daemon.
6920 *
6921 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6922 * @request: capture request
6923 *
6924 *==========================================================================*/
6925void QCamera3HardwareInterface::extractJpegMetadata(
6926 CameraMetadata& jpegMetadata,
6927 const camera3_capture_request_t *request)
6928{
6929 CameraMetadata frame_settings;
6930 frame_settings = request->settings;
6931
6932 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6933 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6934 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6935 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6936
6937 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6938 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6939 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6940 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6941
6942 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6943 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6944 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6945 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6946
6947 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6948 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6949 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6950 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6951
6952 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6953 jpegMetadata.update(ANDROID_JPEG_QUALITY,
6954 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6955 frame_settings.find(ANDROID_JPEG_QUALITY).count);
6956
6957 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6958 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6959 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6960 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6961
6962 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6963 int32_t thumbnail_size[2];
6964 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6965 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6966 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6967 int32_t orientation =
6968 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006969 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006970 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6971 int32_t temp;
6972 temp = thumbnail_size[0];
6973 thumbnail_size[0] = thumbnail_size[1];
6974 thumbnail_size[1] = temp;
6975 }
6976 }
6977 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6978 thumbnail_size,
6979 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6980 }
6981
6982}
6983
6984/*===========================================================================
6985 * FUNCTION : convertToRegions
6986 *
6987 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6988 *
6989 * PARAMETERS :
6990 * @rect : cam_rect_t struct to convert
6991 * @region : int32_t destination array
6992 * @weight : if we are converting from cam_area_t, weight is valid
6993 * else weight = -1
6994 *
6995 *==========================================================================*/
6996void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6997 int32_t *region, int weight)
6998{
6999 region[0] = rect.left;
7000 region[1] = rect.top;
7001 region[2] = rect.left + rect.width;
7002 region[3] = rect.top + rect.height;
7003 if (weight > -1) {
7004 region[4] = weight;
7005 }
7006}
7007
7008/*===========================================================================
7009 * FUNCTION : convertFromRegions
7010 *
 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination to fill
 *   @settings : capture request settings to read the region from
 *   @tag      : metadata tag whose data is [x_min, y_min, x_max, y_max, weight]
7018 *
7019 *==========================================================================*/
7020void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7021 const camera_metadata_t *settings, uint32_t tag)
7022{
7023 CameraMetadata frame_settings;
7024 frame_settings = settings;
7025 int32_t x_min = frame_settings.find(tag).data.i32[0];
7026 int32_t y_min = frame_settings.find(tag).data.i32[1];
7027 int32_t x_max = frame_settings.find(tag).data.i32[2];
7028 int32_t y_max = frame_settings.find(tag).data.i32[3];
7029 roi.weight = frame_settings.find(tag).data.i32[4];
7030 roi.rect.left = x_min;
7031 roi.rect.top = y_min;
7032 roi.rect.width = x_max - x_min;
7033 roi.rect.height = y_max - y_min;
7034}
7035
7036/*===========================================================================
7037 * FUNCTION : resetIfNeededROI
7038 *
7039 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7040 * crop region
7041 *
7042 * PARAMETERS :
7043 * @roi : cam_area_t struct to resize
7044 * @scalerCropRegion : cam_crop_region_t region to compare against
7045 *
7046 *
7047 *==========================================================================*/
7048bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7049 const cam_crop_region_t* scalerCropRegion)
7050{
7051 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7052 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7053 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7054 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7055
7056 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7057 * without having this check the calculations below to validate if the roi
7058 * is inside scalar crop region will fail resulting in the roi not being
7059 * reset causing algorithm to continue to use stale roi window
7060 */
7061 if (roi->weight == 0) {
7062 return true;
7063 }
7064
7065 if ((roi_x_max < scalerCropRegion->left) ||
7066 // right edge of roi window is left of scalar crop's left edge
7067 (roi_y_max < scalerCropRegion->top) ||
7068 // bottom edge of roi window is above scalar crop's top edge
7069 (roi->rect.left > crop_x_max) ||
7070 // left edge of roi window is beyond(right) of scalar crop's right edge
7071 (roi->rect.top > crop_y_max)){
7072 // top edge of roi windo is above scalar crop's top edge
7073 return false;
7074 }
7075 if (roi->rect.left < scalerCropRegion->left) {
7076 roi->rect.left = scalerCropRegion->left;
7077 }
7078 if (roi->rect.top < scalerCropRegion->top) {
7079 roi->rect.top = scalerCropRegion->top;
7080 }
7081 if (roi_x_max > crop_x_max) {
7082 roi_x_max = crop_x_max;
7083 }
7084 if (roi_y_max > crop_y_max) {
7085 roi_y_max = crop_y_max;
7086 }
7087 roi->rect.width = roi_x_max - roi->rect.left;
7088 roi->rect.height = roi_y_max - roi->rect.top;
7089 return true;
7090}
7091
7092/*===========================================================================
7093 * FUNCTION : convertLandmarks
7094 *
7095 * DESCRIPTION: helper method to extract the landmarks from face detection info
7096 *
7097 * PARAMETERS :
7098 * @landmark_data : input landmark data to be converted
7099 * @landmarks : int32_t destination array
7100 *
7101 *
7102 *==========================================================================*/
7103void QCamera3HardwareInterface::convertLandmarks(
7104 cam_face_landmarks_info_t landmark_data,
7105 int32_t *landmarks)
7106{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007107 if (landmark_data.is_left_eye_valid) {
7108 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7109 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7110 } else {
7111 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7112 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7113 }
7114
7115 if (landmark_data.is_right_eye_valid) {
7116 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7117 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7118 } else {
7119 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7120 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7121 }
7122
7123 if (landmark_data.is_mouth_valid) {
7124 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7125 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7126 } else {
7127 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7128 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7129 }
7130}
7131
7132/*===========================================================================
7133 * FUNCTION : setInvalidLandmarks
7134 *
7135 * DESCRIPTION: helper method to set invalid landmarks
7136 *
7137 * PARAMETERS :
7138 * @landmarks : int32_t destination array
7139 *
7140 *
7141 *==========================================================================*/
7142void QCamera3HardwareInterface::setInvalidLandmarks(
7143 int32_t *landmarks)
7144{
7145 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7146 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7147 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7148 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7149 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7150 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007151}
7152
7153#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007154
7155/*===========================================================================
7156 * FUNCTION : getCapabilities
7157 *
7158 * DESCRIPTION: query camera capability from back-end
7159 *
7160 * PARAMETERS :
7161 * @ops : mm-interface ops structure
7162 * @cam_handle : camera handle for which we need capability
7163 *
7164 * RETURN : ptr type of capability structure
7165 * capability for success
7166 * NULL for failure
7167 *==========================================================================*/
7168cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
7169 uint32_t cam_handle)
7170{
7171 int rc = NO_ERROR;
7172 QCamera3HeapMemory *capabilityHeap = NULL;
7173 cam_capability_t *cap_ptr = NULL;
7174
7175 if (ops == NULL) {
7176 LOGE("Invalid arguments");
7177 return NULL;
7178 }
7179
7180 capabilityHeap = new QCamera3HeapMemory(1);
7181 if (capabilityHeap == NULL) {
7182 LOGE("creation of capabilityHeap failed");
7183 return NULL;
7184 }
7185
7186 /* Allocate memory for capability buffer */
7187 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
7188 if(rc != OK) {
7189 LOGE("No memory for cappability");
7190 goto allocate_failed;
7191 }
7192
7193 /* Map memory for capability buffer */
7194 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
7195
7196 rc = ops->map_buf(cam_handle,
7197 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
7198 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
7199 if(rc < 0) {
7200 LOGE("failed to map capability buffer");
7201 rc = FAILED_TRANSACTION;
7202 goto map_failed;
7203 }
7204
7205 /* Query Capability */
7206 rc = ops->query_capability(cam_handle);
7207 if(rc < 0) {
7208 LOGE("failed to query capability");
7209 rc = FAILED_TRANSACTION;
7210 goto query_failed;
7211 }
7212
7213 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
7214 if (cap_ptr == NULL) {
7215 LOGE("out of memory");
7216 rc = NO_MEMORY;
7217 goto query_failed;
7218 }
7219
7220 memset(cap_ptr, 0, sizeof(cam_capability_t));
7221 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
7222
7223 int index;
7224 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
7225 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
7226 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
7227 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
7228 }
7229
7230query_failed:
7231 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
7232map_failed:
7233 capabilityHeap->deallocate();
7234allocate_failed:
7235 delete capabilityHeap;
7236
7237 if (rc != NO_ERROR) {
7238 return NULL;
7239 } else {
7240 return cap_ptr;
7241 }
7242}
7243
Thierry Strudel3d639192016-09-09 11:52:26 -07007244/*===========================================================================
7245 * FUNCTION : initCapabilities
7246 *
7247 * DESCRIPTION: initialize camera capabilities in static data struct
7248 *
7249 * PARAMETERS :
7250 * @cameraId : camera Id
7251 *
7252 * RETURN : int32_t type of status
7253 * NO_ERROR -- success
 *              non-zero failure code
7255 *==========================================================================*/
7256int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7257{
7258 int rc = 0;
7259 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007260 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007261
7262 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7263 if (rc) {
7264 LOGE("camera_open failed. rc = %d", rc);
7265 goto open_failed;
7266 }
7267 if (!cameraHandle) {
7268 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7269 goto open_failed;
7270 }
7271
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007272 handle = get_main_camera_handle(cameraHandle->camera_handle);
7273 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7274 if (gCamCapability[cameraId] == NULL) {
7275 rc = FAILED_TRANSACTION;
7276 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007277 }
7278
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007279 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007280 if (is_dual_camera_by_idx(cameraId)) {
7281 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7282 gCamCapability[cameraId]->aux_cam_cap =
7283 getCapabilities(cameraHandle->ops, handle);
7284 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7285 rc = FAILED_TRANSACTION;
7286 free(gCamCapability[cameraId]);
7287 goto failed_op;
7288 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007289
7290 // Copy the main camera capability to main_cam_cap struct
7291 gCamCapability[cameraId]->main_cam_cap =
7292 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7293 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7294 LOGE("out of memory");
7295 rc = NO_MEMORY;
7296 goto failed_op;
7297 }
7298 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7299 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007300 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007301failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007302 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7303 cameraHandle = NULL;
7304open_failed:
7305 return rc;
7306}
7307
7308/*==========================================================================
 * FUNCTION   : get3AVersion
7310 *
7311 * DESCRIPTION: get the Q3A S/W version
7312 *
7313 * PARAMETERS :
7314 * @sw_version: Reference of Q3A structure which will hold version info upon
7315 * return
7316 *
7317 * RETURN : None
7318 *
7319 *==========================================================================*/
7320void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7321{
7322 if(gCamCapability[mCameraId])
7323 sw_version = gCamCapability[mCameraId]->q3a_version;
7324 else
7325 LOGE("Capability structure NULL!");
7326}
7327
7328
7329/*===========================================================================
7330 * FUNCTION : initParameters
7331 *
7332 * DESCRIPTION: initialize camera parameters
7333 *
7334 * PARAMETERS :
7335 *
7336 * RETURN : int32_t type of status
7337 * NO_ERROR -- success
 *              non-zero failure code
7339 *==========================================================================*/
7340int QCamera3HardwareInterface::initParameters()
7341{
7342 int rc = 0;
7343
7344 //Allocate Set Param Buffer
7345 mParamHeap = new QCamera3HeapMemory(1);
7346 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7347 if(rc != OK) {
7348 rc = NO_MEMORY;
7349 LOGE("Failed to allocate SETPARM Heap memory");
7350 delete mParamHeap;
7351 mParamHeap = NULL;
7352 return rc;
7353 }
7354
7355 //Map memory for parameters buffer
7356 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7357 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7358 mParamHeap->getFd(0),
7359 sizeof(metadata_buffer_t),
7360 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7361 if(rc < 0) {
7362 LOGE("failed to map SETPARM buffer");
7363 rc = FAILED_TRANSACTION;
7364 mParamHeap->deallocate();
7365 delete mParamHeap;
7366 mParamHeap = NULL;
7367 return rc;
7368 }
7369
7370 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7371
7372 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7373 return rc;
7374}
7375
7376/*===========================================================================
7377 * FUNCTION : deinitParameters
7378 *
7379 * DESCRIPTION: de-initialize camera parameters
7380 *
7381 * PARAMETERS :
7382 *
7383 * RETURN : NONE
7384 *==========================================================================*/
void QCamera3HardwareInterface::deinitParameters()
{
    // Tear down in the reverse order of initParameters(): first unmap the
    // parameter buffer from the camera backend while the heap still exists...
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // ...then release the backing heap itself.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; it is now invalid.
    mParameters = NULL;

    // mPrevParameters was malloc'd separately in initParameters().
    free(mPrevParameters);
    mPrevParameters = NULL;
}
7399
7400/*===========================================================================
7401 * FUNCTION : calcMaxJpegSize
7402 *
7403 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7404 *
7405 * PARAMETERS :
7406 *
7407 * RETURN : max_jpeg_size
7408 *==========================================================================*/
7409size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7410{
7411 size_t max_jpeg_size = 0;
7412 size_t temp_width, temp_height;
7413 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7414 MAX_SIZES_CNT);
7415 for (size_t i = 0; i < count; i++) {
7416 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7417 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7418 if (temp_width * temp_height > max_jpeg_size ) {
7419 max_jpeg_size = temp_width * temp_height;
7420 }
7421 }
7422 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7423 return max_jpeg_size;
7424}
7425
7426/*===========================================================================
7427 * FUNCTION : getMaxRawSize
7428 *
7429 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7430 *
7431 * PARAMETERS :
7432 *
7433 * RETURN : Largest supported Raw Dimension
7434 *==========================================================================*/
7435cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7436{
7437 int max_width = 0;
7438 cam_dimension_t maxRawSize;
7439
7440 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7441 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7442 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7443 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7444 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7445 }
7446 }
7447 return maxRawSize;
7448}
7449
7450
7451/*===========================================================================
7452 * FUNCTION : calcMaxJpegDim
7453 *
7454 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7455 *
7456 * PARAMETERS :
7457 *
7458 * RETURN : max_jpeg_dim
7459 *==========================================================================*/
7460cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7461{
7462 cam_dimension_t max_jpeg_dim;
7463 cam_dimension_t curr_jpeg_dim;
7464 max_jpeg_dim.width = 0;
7465 max_jpeg_dim.height = 0;
7466 curr_jpeg_dim.width = 0;
7467 curr_jpeg_dim.height = 0;
7468 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7469 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7470 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7471 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7472 max_jpeg_dim.width * max_jpeg_dim.height ) {
7473 max_jpeg_dim.width = curr_jpeg_dim.width;
7474 max_jpeg_dim.height = curr_jpeg_dim.height;
7475 }
7476 }
7477 return max_jpeg_dim;
7478}
7479
7480/*===========================================================================
7481 * FUNCTION : addStreamConfig
7482 *
7483 * DESCRIPTION: adds the stream configuration to the array
7484 *
7485 * PARAMETERS :
7486 * @available_stream_configs : pointer to stream configuration array
7487 * @scalar_format : scalar format
7488 * @dim : configuration dimension
7489 * @config_type : input or output configuration type
7490 *
7491 * RETURN : NONE
7492 *==========================================================================*/
7493void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7494 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7495{
7496 available_stream_configs.add(scalar_format);
7497 available_stream_configs.add(dim.width);
7498 available_stream_configs.add(dim.height);
7499 available_stream_configs.add(config_type);
7500}
7501
7502/*===========================================================================
 * FUNCTION   : supportBurstCapture
7504 *
7505 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7506 *
7507 * PARAMETERS :
7508 * @cameraId : camera Id
7509 *
7510 * RETURN : true if camera supports BURST_CAPTURE
7511 * false otherwise
7512 *==========================================================================*/
7513bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7514{
7515 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7516 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7517 const int32_t highResWidth = 3264;
7518 const int32_t highResHeight = 2448;
7519
7520 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7521 // Maximum resolution images cannot be captured at >= 10fps
7522 // -> not supporting BURST_CAPTURE
7523 return false;
7524 }
7525
7526 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7527 // Maximum resolution images can be captured at >= 20fps
7528 // --> supporting BURST_CAPTURE
7529 return true;
7530 }
7531
7532 // Find the smallest highRes resolution, or largest resolution if there is none
7533 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7534 MAX_SIZES_CNT);
7535 size_t highRes = 0;
7536 while ((highRes + 1 < totalCnt) &&
7537 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7538 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7539 highResWidth * highResHeight)) {
7540 highRes++;
7541 }
7542 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7543 return true;
7544 } else {
7545 return false;
7546 }
7547}
7548
7549/*===========================================================================
7550 * FUNCTION : initStaticMetadata
7551 *
7552 * DESCRIPTION: initialize the static metadata
7553 *
7554 * PARAMETERS :
7555 * @cameraId : camera Id
7556 *
7557 * RETURN : int32_t type of status
7558 * 0 -- success
7559 * non-zero failure code
7560 *==========================================================================*/
7561int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7562{
7563 int rc = 0;
7564 CameraMetadata staticInfo;
7565 size_t count = 0;
7566 bool limitedDevice = false;
7567 char prop[PROPERTY_VALUE_MAX];
7568 bool supportBurst = false;
7569
7570 supportBurst = supportBurstCapture(cameraId);
7571
7572 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7573 * guaranteed or if min fps of max resolution is less than 20 fps, its
7574 * advertised as limited device*/
7575 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7576 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7577 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7578 !supportBurst;
7579
7580 uint8_t supportedHwLvl = limitedDevice ?
7581 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007582#ifndef USE_HAL_3_3
7583 // LEVEL_3 - This device will support level 3.
7584 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7585#else
Thierry Strudel3d639192016-09-09 11:52:26 -07007586 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007587#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007588
7589 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7590 &supportedHwLvl, 1);
7591
7592 bool facingBack = false;
7593 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
7594 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
7595 facingBack = true;
7596 }
7597 /*HAL 3 only*/
7598 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7599 &gCamCapability[cameraId]->min_focus_distance, 1);
7600
7601 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7602 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7603
7604 /*should be using focal lengths but sensor doesn't provide that info now*/
7605 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7606 &gCamCapability[cameraId]->focal_length,
7607 1);
7608
7609 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7610 gCamCapability[cameraId]->apertures,
7611 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7612
7613 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7614 gCamCapability[cameraId]->filter_densities,
7615 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7616
7617
7618 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7619 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7620 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7621
7622 int32_t lens_shading_map_size[] = {
7623 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7624 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7625 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7626 lens_shading_map_size,
7627 sizeof(lens_shading_map_size)/sizeof(int32_t));
7628
7629 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7630 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7631
7632 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7633 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7634
7635 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7636 &gCamCapability[cameraId]->max_frame_duration, 1);
7637
7638 camera_metadata_rational baseGainFactor = {
7639 gCamCapability[cameraId]->base_gain_factor.numerator,
7640 gCamCapability[cameraId]->base_gain_factor.denominator};
7641 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7642 &baseGainFactor, 1);
7643
7644 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7645 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7646
7647 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7648 gCamCapability[cameraId]->pixel_array_size.height};
7649 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7650 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7651
7652 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7653 gCamCapability[cameraId]->active_array_size.top,
7654 gCamCapability[cameraId]->active_array_size.width,
7655 gCamCapability[cameraId]->active_array_size.height};
7656 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7657 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7658
7659 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7660 &gCamCapability[cameraId]->white_level, 1);
7661
7662 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
7663 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
7664
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007665#ifndef USE_HAL_3_3
7666 bool hasBlackRegions = false;
7667 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7668 LOGW("black_region_count: %d is bounded to %d",
7669 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7670 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7671 }
7672 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7673 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7674 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7675 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7676 }
7677 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7678 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7679 hasBlackRegions = true;
7680 }
7681#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007682 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7683 &gCamCapability[cameraId]->flash_charge_duration, 1);
7684
7685 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7686 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7687
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007688 // SOF timestamp is based on monotonic_boottime. So advertize REALTIME timesource
7689 // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
7690 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7691 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007692 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7693 &timestampSource, 1);
7694
7695 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7696 &gCamCapability[cameraId]->histogram_size, 1);
7697
7698 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7699 &gCamCapability[cameraId]->max_histogram_count, 1);
7700
7701 int32_t sharpness_map_size[] = {
7702 gCamCapability[cameraId]->sharpness_map_size.width,
7703 gCamCapability[cameraId]->sharpness_map_size.height};
7704
7705 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7706 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7707
7708 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7709 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7710
7711 int32_t scalar_formats[] = {
7712 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7713 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7714 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7715 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7716 HAL_PIXEL_FORMAT_RAW10,
7717 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7718 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7719 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7720 scalar_formats,
7721 scalar_formats_count);
7722
7723 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
7724 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7725 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
7726 count, MAX_SIZES_CNT, available_processed_sizes);
7727 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
7728 available_processed_sizes, count * 2);
7729
7730 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
7731 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
7732 makeTable(gCamCapability[cameraId]->raw_dim,
7733 count, MAX_SIZES_CNT, available_raw_sizes);
7734 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
7735 available_raw_sizes, count * 2);
7736
7737 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
7738 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
7739 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
7740 count, MAX_SIZES_CNT, available_fps_ranges);
7741 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7742 available_fps_ranges, count * 2);
7743
7744 camera_metadata_rational exposureCompensationStep = {
7745 gCamCapability[cameraId]->exp_compensation_step.numerator,
7746 gCamCapability[cameraId]->exp_compensation_step.denominator};
7747 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
7748 &exposureCompensationStep, 1);
7749
7750 Vector<uint8_t> availableVstabModes;
7751 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
7752 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007753 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07007754 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007755 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07007756 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007757 count = IS_TYPE_MAX;
7758 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
7759 for (size_t i = 0; i < count; i++) {
7760 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
7761 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
7762 eisSupported = true;
7763 break;
7764 }
7765 }
7766 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007767 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
7768 }
7769 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7770 availableVstabModes.array(), availableVstabModes.size());
7771
7772 /*HAL 1 and HAL 3 common*/
7773 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
7774 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
7775 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
7776 float maxZoom = maxZoomStep/minZoomStep;
7777 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7778 &maxZoom, 1);
7779
7780 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
7781 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
7782
7783 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
7784 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
7785 max3aRegions[2] = 0; /* AF not supported */
7786 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
7787 max3aRegions, 3);
7788
7789 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
7790 memset(prop, 0, sizeof(prop));
7791 property_get("persist.camera.facedetect", prop, "1");
7792 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
7793 LOGD("Support face detection mode: %d",
7794 supportedFaceDetectMode);
7795
7796 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07007797 /* support mode should be OFF if max number of face is 0 */
7798 if (maxFaces <= 0) {
7799 supportedFaceDetectMode = 0;
7800 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007801 Vector<uint8_t> availableFaceDetectModes;
7802 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
7803 if (supportedFaceDetectMode == 1) {
7804 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7805 } else if (supportedFaceDetectMode == 2) {
7806 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7807 } else if (supportedFaceDetectMode == 3) {
7808 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
7809 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
7810 } else {
7811 maxFaces = 0;
7812 }
7813 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7814 availableFaceDetectModes.array(),
7815 availableFaceDetectModes.size());
7816 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
7817 (int32_t *)&maxFaces, 1);
7818
7819 int32_t exposureCompensationRange[] = {
7820 gCamCapability[cameraId]->exposure_compensation_min,
7821 gCamCapability[cameraId]->exposure_compensation_max};
7822 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
7823 exposureCompensationRange,
7824 sizeof(exposureCompensationRange)/sizeof(int32_t));
7825
7826 uint8_t lensFacing = (facingBack) ?
7827 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
7828 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
7829
7830 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7831 available_thumbnail_sizes,
7832 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
7833
7834 /*all sizes will be clubbed into this tag*/
7835 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
7836 /*android.scaler.availableStreamConfigurations*/
7837 Vector<int32_t> available_stream_configs;
7838 cam_dimension_t active_array_dim;
7839 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
7840 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
7841 /* Add input/output stream configurations for each scalar formats*/
7842 for (size_t j = 0; j < scalar_formats_count; j++) {
7843 switch (scalar_formats[j]) {
7844 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7845 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7846 case HAL_PIXEL_FORMAT_RAW10:
7847 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7848 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7849 addStreamConfig(available_stream_configs, scalar_formats[j],
7850 gCamCapability[cameraId]->raw_dim[i],
7851 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7852 }
7853 break;
7854 case HAL_PIXEL_FORMAT_BLOB:
7855 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7856 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7857 addStreamConfig(available_stream_configs, scalar_formats[j],
7858 gCamCapability[cameraId]->picture_sizes_tbl[i],
7859 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7860 }
7861 break;
7862 case HAL_PIXEL_FORMAT_YCbCr_420_888:
7863 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7864 default:
7865 cam_dimension_t largest_picture_size;
7866 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7867 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7868 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7869 addStreamConfig(available_stream_configs, scalar_formats[j],
7870 gCamCapability[cameraId]->picture_sizes_tbl[i],
7871 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7872 /* Book keep largest */
7873 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7874 >= largest_picture_size.width &&
7875 gCamCapability[cameraId]->picture_sizes_tbl[i].height
7876 >= largest_picture_size.height)
7877 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7878 }
7879 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
7880 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7881 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7882 addStreamConfig(available_stream_configs, scalar_formats[j],
7883 largest_picture_size,
7884 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7885 }
7886 break;
7887 }
7888 }
7889
7890 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7891 available_stream_configs.array(), available_stream_configs.size());
7892 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7893 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7894
7895 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7896 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7897
7898 /* android.scaler.availableMinFrameDurations */
7899 Vector<int64_t> available_min_durations;
7900 for (size_t j = 0; j < scalar_formats_count; j++) {
7901 switch (scalar_formats[j]) {
7902 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7903 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7904 case HAL_PIXEL_FORMAT_RAW10:
7905 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7906 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7907 available_min_durations.add(scalar_formats[j]);
7908 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7909 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7910 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7911 }
7912 break;
7913 default:
7914 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7915 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7916 available_min_durations.add(scalar_formats[j]);
7917 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7918 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7919 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7920 }
7921 break;
7922 }
7923 }
7924 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7925 available_min_durations.array(), available_min_durations.size());
7926
7927 Vector<int32_t> available_hfr_configs;
7928 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7929 int32_t fps = 0;
7930 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7931 case CAM_HFR_MODE_60FPS:
7932 fps = 60;
7933 break;
7934 case CAM_HFR_MODE_90FPS:
7935 fps = 90;
7936 break;
7937 case CAM_HFR_MODE_120FPS:
7938 fps = 120;
7939 break;
7940 case CAM_HFR_MODE_150FPS:
7941 fps = 150;
7942 break;
7943 case CAM_HFR_MODE_180FPS:
7944 fps = 180;
7945 break;
7946 case CAM_HFR_MODE_210FPS:
7947 fps = 210;
7948 break;
7949 case CAM_HFR_MODE_240FPS:
7950 fps = 240;
7951 break;
7952 case CAM_HFR_MODE_480FPS:
7953 fps = 480;
7954 break;
7955 case CAM_HFR_MODE_OFF:
7956 case CAM_HFR_MODE_MAX:
7957 default:
7958 break;
7959 }
7960
7961 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7962 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7963 /* For each HFR frame rate, need to advertise one variable fps range
7964 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7965 * and [120, 120]. While camcorder preview alone is running [30, 120] is
7966 * set by the app. When video recording is started, [120, 120] is
7967 * set. This way sensor configuration does not change when recording
7968 * is started */
7969
7970 /* (width, height, fps_min, fps_max, batch_size_max) */
7971 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7972 j < MAX_SIZES_CNT; j++) {
7973 available_hfr_configs.add(
7974 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7975 available_hfr_configs.add(
7976 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7977 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7978 available_hfr_configs.add(fps);
7979 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7980
7981 /* (width, height, fps_min, fps_max, batch_size_max) */
7982 available_hfr_configs.add(
7983 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7984 available_hfr_configs.add(
7985 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7986 available_hfr_configs.add(fps);
7987 available_hfr_configs.add(fps);
7988 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7989 }
7990 }
7991 }
7992 //Advertise HFR capability only if the property is set
7993 memset(prop, 0, sizeof(prop));
7994 property_get("persist.camera.hal3hfr.enable", prop, "1");
7995 uint8_t hfrEnable = (uint8_t)atoi(prop);
7996
7997 if(hfrEnable && available_hfr_configs.array()) {
7998 staticInfo.update(
7999 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8000 available_hfr_configs.array(), available_hfr_configs.size());
8001 }
8002
8003 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8004 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8005 &max_jpeg_size, 1);
8006
8007 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8008 size_t size = 0;
8009 count = CAM_EFFECT_MODE_MAX;
8010 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8011 for (size_t i = 0; i < count; i++) {
8012 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8013 gCamCapability[cameraId]->supported_effects[i]);
8014 if (NAME_NOT_FOUND != val) {
8015 avail_effects[size] = (uint8_t)val;
8016 size++;
8017 }
8018 }
8019 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8020 avail_effects,
8021 size);
8022
8023 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8024 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8025 size_t supported_scene_modes_cnt = 0;
8026 count = CAM_SCENE_MODE_MAX;
8027 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8028 for (size_t i = 0; i < count; i++) {
8029 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8030 CAM_SCENE_MODE_OFF) {
8031 int val = lookupFwkName(SCENE_MODES_MAP,
8032 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8033 gCamCapability[cameraId]->supported_scene_modes[i]);
8034 if (NAME_NOT_FOUND != val) {
8035 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8036 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8037 supported_scene_modes_cnt++;
8038 }
8039 }
8040 }
8041 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8042 avail_scene_modes,
8043 supported_scene_modes_cnt);
8044
8045 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8046 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8047 supported_scene_modes_cnt,
8048 CAM_SCENE_MODE_MAX,
8049 scene_mode_overrides,
8050 supported_indexes,
8051 cameraId);
8052
8053 if (supported_scene_modes_cnt == 0) {
8054 supported_scene_modes_cnt = 1;
8055 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8056 }
8057
8058 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8059 scene_mode_overrides, supported_scene_modes_cnt * 3);
8060
8061 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8062 ANDROID_CONTROL_MODE_AUTO,
8063 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8064 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8065 available_control_modes,
8066 3);
8067
8068 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8069 size = 0;
8070 count = CAM_ANTIBANDING_MODE_MAX;
8071 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8072 for (size_t i = 0; i < count; i++) {
8073 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8074 gCamCapability[cameraId]->supported_antibandings[i]);
8075 if (NAME_NOT_FOUND != val) {
8076 avail_antibanding_modes[size] = (uint8_t)val;
8077 size++;
8078 }
8079
8080 }
8081 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8082 avail_antibanding_modes,
8083 size);
8084
8085 uint8_t avail_abberation_modes[] = {
8086 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8087 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8088 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8089 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8090 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8091 if (0 == count) {
8092 // If no aberration correction modes are available for a device, advertise only the OFF mode
8093 size = 1;
8094 } else {
8095 // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
8096 // So, advertise all 3 modes if at least one mode is supported, as per the
8097 // new M requirement.
8098 size = 3;
8099 }
8100 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8101 avail_abberation_modes,
8102 size);
8103
8104 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8105 size = 0;
8106 count = CAM_FOCUS_MODE_MAX;
8107 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8108 for (size_t i = 0; i < count; i++) {
8109 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8110 gCamCapability[cameraId]->supported_focus_modes[i]);
8111 if (NAME_NOT_FOUND != val) {
8112 avail_af_modes[size] = (uint8_t)val;
8113 size++;
8114 }
8115 }
8116 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8117 avail_af_modes,
8118 size);
8119
8120 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8121 size = 0;
8122 count = CAM_WB_MODE_MAX;
8123 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8124 for (size_t i = 0; i < count; i++) {
8125 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8126 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8127 gCamCapability[cameraId]->supported_white_balances[i]);
8128 if (NAME_NOT_FOUND != val) {
8129 avail_awb_modes[size] = (uint8_t)val;
8130 size++;
8131 }
8132 }
8133 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8134 avail_awb_modes,
8135 size);
8136
8137 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8138 count = CAM_FLASH_FIRING_LEVEL_MAX;
8139 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8140 count);
8141 for (size_t i = 0; i < count; i++) {
8142 available_flash_levels[i] =
8143 gCamCapability[cameraId]->supported_firing_levels[i];
8144 }
8145 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8146 available_flash_levels, count);
8147
8148 uint8_t flashAvailable;
8149 if (gCamCapability[cameraId]->flash_available)
8150 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8151 else
8152 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8153 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8154 &flashAvailable, 1);
8155
8156 Vector<uint8_t> avail_ae_modes;
8157 count = CAM_AE_MODE_MAX;
8158 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8159 for (size_t i = 0; i < count; i++) {
8160 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8161 }
8162 if (flashAvailable) {
8163 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8164 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8165 }
8166 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8167 avail_ae_modes.array(),
8168 avail_ae_modes.size());
8169
8170 int32_t sensitivity_range[2];
8171 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8172 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8173 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8174 sensitivity_range,
8175 sizeof(sensitivity_range) / sizeof(int32_t));
8176
8177 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8178 &gCamCapability[cameraId]->max_analog_sensitivity,
8179 1);
8180
8181 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8182 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8183 &sensor_orientation,
8184 1);
8185
8186 int32_t max_output_streams[] = {
8187 MAX_STALLING_STREAMS,
8188 MAX_PROCESSED_STREAMS,
8189 MAX_RAW_STREAMS};
8190 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8191 max_output_streams,
8192 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8193
8194 uint8_t avail_leds = 0;
8195 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8196 &avail_leds, 0);
8197
8198 uint8_t focus_dist_calibrated;
8199 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8200 gCamCapability[cameraId]->focus_dist_calibrated);
8201 if (NAME_NOT_FOUND != val) {
8202 focus_dist_calibrated = (uint8_t)val;
8203 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8204 &focus_dist_calibrated, 1);
8205 }
8206
8207 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8208 size = 0;
8209 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8210 MAX_TEST_PATTERN_CNT);
8211 for (size_t i = 0; i < count; i++) {
8212 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8213 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8214 if (NAME_NOT_FOUND != testpatternMode) {
8215 avail_testpattern_modes[size] = testpatternMode;
8216 size++;
8217 }
8218 }
8219 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8220 avail_testpattern_modes,
8221 size);
8222
8223 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8224 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8225 &max_pipeline_depth,
8226 1);
8227
8228 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8229 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8230 &partial_result_count,
8231 1);
8232
8233 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8234 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8235
8236 Vector<uint8_t> available_capabilities;
8237 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8238 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8239 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8240 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8241 if (supportBurst) {
8242 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8243 }
8244 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8245 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8246 if (hfrEnable && available_hfr_configs.array()) {
8247 available_capabilities.add(
8248 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8249 }
8250
8251 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8252 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8253 }
8254 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8255 available_capabilities.array(),
8256 available_capabilities.size());
8257
8258 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8259 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8260 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8261 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8262
8263 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8264 &aeLockAvailable, 1);
8265
8266 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8267 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8268 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8269 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8270
8271 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8272 &awbLockAvailable, 1);
8273
8274 int32_t max_input_streams = 1;
8275 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8276 &max_input_streams,
8277 1);
8278
8279 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8280 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8281 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8282 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8283 HAL_PIXEL_FORMAT_YCbCr_420_888};
8284 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8285 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8286
8287 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8288 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8289 &max_latency,
8290 1);
8291
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008292#ifndef USE_HAL_3_3
8293 int32_t isp_sensitivity_range[2];
8294 isp_sensitivity_range[0] =
8295 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8296 isp_sensitivity_range[1] =
8297 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8298 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8299 isp_sensitivity_range,
8300 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8301#endif
8302
Thierry Strudel3d639192016-09-09 11:52:26 -07008303 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8304 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8305 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8306 available_hot_pixel_modes,
8307 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8308
8309 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8310 ANDROID_SHADING_MODE_FAST,
8311 ANDROID_SHADING_MODE_HIGH_QUALITY};
8312 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8313 available_shading_modes,
8314 3);
8315
8316 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8317 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8318 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8319 available_lens_shading_map_modes,
8320 2);
8321
8322 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8323 ANDROID_EDGE_MODE_FAST,
8324 ANDROID_EDGE_MODE_HIGH_QUALITY,
8325 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8326 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8327 available_edge_modes,
8328 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8329
8330 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8331 ANDROID_NOISE_REDUCTION_MODE_FAST,
8332 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8333 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8334 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8335 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8336 available_noise_red_modes,
8337 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8338
8339 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8340 ANDROID_TONEMAP_MODE_FAST,
8341 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8342 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8343 available_tonemap_modes,
8344 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8345
8346 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8347 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8348 available_hot_pixel_map_modes,
8349 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8350
8351 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8352 gCamCapability[cameraId]->reference_illuminant1);
8353 if (NAME_NOT_FOUND != val) {
8354 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8355 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8356 }
8357
8358 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8359 gCamCapability[cameraId]->reference_illuminant2);
8360 if (NAME_NOT_FOUND != val) {
8361 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8362 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8363 }
8364
8365 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8366 (void *)gCamCapability[cameraId]->forward_matrix1,
8367 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8368
8369 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8370 (void *)gCamCapability[cameraId]->forward_matrix2,
8371 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8372
8373 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8374 (void *)gCamCapability[cameraId]->color_transform1,
8375 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8376
8377 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8378 (void *)gCamCapability[cameraId]->color_transform2,
8379 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8380
8381 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8382 (void *)gCamCapability[cameraId]->calibration_transform1,
8383 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8384
8385 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8386 (void *)gCamCapability[cameraId]->calibration_transform2,
8387 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8388
8389 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8390 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8391 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8392 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8393 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8394 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8395 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8396 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8397 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8398 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8399 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8400 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8401 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8402 ANDROID_JPEG_GPS_COORDINATES,
8403 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8404 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8405 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8406 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8407 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8408 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8409 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8410 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8411 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8412 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008413#ifndef USE_HAL_3_3
8414 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8415#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008416 ANDROID_STATISTICS_FACE_DETECT_MODE,
8417 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8418 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8419 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8420 ANDROID_BLACK_LEVEL_LOCK };
8421
8422 size_t request_keys_cnt =
8423 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8424 Vector<int32_t> available_request_keys;
8425 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8426 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8427 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8428 }
8429
8430 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8431 available_request_keys.array(), available_request_keys.size());
8432
8433 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8434 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8435 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8436 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8437 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8438 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8439 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8440 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8441 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8442 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8443 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8444 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8445 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8446 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8447 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8448 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8449 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8450 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8451 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8452 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8453 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008454 ANDROID_STATISTICS_FACE_SCORES,
8455#ifndef USE_HAL_3_3
8456 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8457#endif
8458 };
8459
Thierry Strudel3d639192016-09-09 11:52:26 -07008460 size_t result_keys_cnt =
8461 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8462
8463 Vector<int32_t> available_result_keys;
8464 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8465 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8466 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8467 }
8468 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8469 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8470 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8471 }
8472 if (supportedFaceDetectMode == 1) {
8473 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8474 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8475 } else if ((supportedFaceDetectMode == 2) ||
8476 (supportedFaceDetectMode == 3)) {
8477 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8478 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8479 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008480#ifndef USE_HAL_3_3
8481 if (hasBlackRegions) {
8482 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8483 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8484 }
8485#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008486 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8487 available_result_keys.array(), available_result_keys.size());
8488
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008489 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07008490 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8491 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8492 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8493 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8494 ANDROID_SCALER_CROPPING_TYPE,
8495 ANDROID_SYNC_MAX_LATENCY,
8496 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8497 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8498 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8499 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8500 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8501 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8502 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8503 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8504 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8505 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8506 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8507 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8508 ANDROID_LENS_FACING,
8509 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8510 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8511 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8512 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8513 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8514 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8515 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8516 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8517 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8518 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8519 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8520 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8521 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8522 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8523 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8524 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8525 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8526 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8527 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8528 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8529 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8530 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8531 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8532 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8533 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8534 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8535 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8536 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8537 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8538 ANDROID_CONTROL_AVAILABLE_MODES,
8539 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8540 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8541 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8542 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008543 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8544#ifndef USE_HAL_3_3
8545 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
8546 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8547#endif
8548 };
8549
8550 Vector<int32_t> available_characteristics_keys;
8551 available_characteristics_keys.appendArray(characteristics_keys_basic,
8552 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8553#ifndef USE_HAL_3_3
8554 if (hasBlackRegions) {
8555 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8556 }
8557#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008558 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008559 available_characteristics_keys.array(),
8560 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07008561
8562 /*available stall durations depend on the hw + sw and will be different for different devices */
8563 /*have to add for raw after implementation*/
8564 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8565 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8566
8567 Vector<int64_t> available_stall_durations;
8568 for (uint32_t j = 0; j < stall_formats_count; j++) {
8569 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8570 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8571 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8572 available_stall_durations.add(stall_formats[j]);
8573 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8574 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8575 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8576 }
8577 } else {
8578 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8579 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8580 available_stall_durations.add(stall_formats[j]);
8581 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8582 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8583 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8584 }
8585 }
8586 }
8587 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8588 available_stall_durations.array(),
8589 available_stall_durations.size());
8590
8591 //QCAMERA3_OPAQUE_RAW
8592 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8593 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8594 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8595 case LEGACY_RAW:
8596 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8597 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8598 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8599 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8600 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8601 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8602 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8603 break;
8604 case MIPI_RAW:
8605 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8606 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8607 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8608 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8609 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8610 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8611 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8612 break;
8613 default:
8614 LOGE("unknown opaque_raw_format %d",
8615 gCamCapability[cameraId]->opaque_raw_fmt);
8616 break;
8617 }
8618 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8619
8620 Vector<int32_t> strides;
8621 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8622 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8623 cam_stream_buf_plane_info_t buf_planes;
8624 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8625 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8626 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8627 &gCamCapability[cameraId]->padding_info, &buf_planes);
8628 strides.add(buf_planes.plane_info.mp[0].stride);
8629 }
8630 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8631 strides.size());
8632
Thierry Strudel04e026f2016-10-10 11:27:36 -07008633 //Video HDR default
8634 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8635 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8636 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8637 int32_t vhdr_mode[] = {
8638 QCAMERA3_VIDEO_HDR_MODE_OFF,
8639 QCAMERA3_VIDEO_HDR_MODE_ON};
8640
8641 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8642 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8643 vhdr_mode, vhdr_mode_count);
8644 }
8645
Thierry Strudel3d639192016-09-09 11:52:26 -07008646 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8647 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8648 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8649
8650 uint8_t isMonoOnly =
8651 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8652 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8653 &isMonoOnly, 1);
8654
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008655#ifndef USE_HAL_3_3
8656 Vector<int32_t> opaque_size;
8657 for (size_t j = 0; j < scalar_formats_count; j++) {
8658 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8659 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8660 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8661 cam_stream_buf_plane_info_t buf_planes;
8662
8663 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8664 &gCamCapability[cameraId]->padding_info, &buf_planes);
8665
8666 if (rc == 0) {
8667 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8668 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8669 opaque_size.add(buf_planes.plane_info.frame_len);
8670 }else {
8671 LOGE("raw frame calculation failed!");
8672 }
8673 }
8674 }
8675 }
8676
8677 if ((opaque_size.size() > 0) &&
8678 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
8679 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
8680 else
8681 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
8682#endif
8683
Thierry Strudel04e026f2016-10-10 11:27:36 -07008684 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
8685 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
8686 size = 0;
8687 count = CAM_IR_MODE_MAX;
8688 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
8689 for (size_t i = 0; i < count; i++) {
8690 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
8691 gCamCapability[cameraId]->supported_ir_modes[i]);
8692 if (NAME_NOT_FOUND != val) {
8693 avail_ir_modes[size] = (int32_t)val;
8694 size++;
8695 }
8696 }
8697 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
8698 avail_ir_modes, size);
8699 }
8700
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008701 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
8702 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
8703 size = 0;
8704 count = CAM_AEC_CONVERGENCE_MAX;
8705 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
8706 for (size_t i = 0; i < count; i++) {
8707 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
8708 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
8709 if (NAME_NOT_FOUND != val) {
8710 available_instant_aec_modes[size] = (int32_t)val;
8711 size++;
8712 }
8713 }
8714 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
8715 available_instant_aec_modes, size);
8716 }
8717
Thierry Strudel3d639192016-09-09 11:52:26 -07008718 gStaticMetadata[cameraId] = staticInfo.release();
8719 return rc;
8720}
8721
8722/*===========================================================================
8723 * FUNCTION : makeTable
8724 *
8725 * DESCRIPTION: make a table of sizes
8726 *
8727 * PARAMETERS :
8728 *
8729 *
8730 *==========================================================================*/
8731void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
8732 size_t max_size, int32_t *sizeTable)
8733{
8734 size_t j = 0;
8735 if (size > max_size) {
8736 size = max_size;
8737 }
8738 for (size_t i = 0; i < size; i++) {
8739 sizeTable[j] = dimTable[i].width;
8740 sizeTable[j+1] = dimTable[i].height;
8741 j+=2;
8742 }
8743}
8744
8745/*===========================================================================
8746 * FUNCTION : makeFPSTable
8747 *
8748 * DESCRIPTION: make a table of fps ranges
8749 *
8750 * PARAMETERS :
8751 *
8752 *==========================================================================*/
8753void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
8754 size_t max_size, int32_t *fpsRangesTable)
8755{
8756 size_t j = 0;
8757 if (size > max_size) {
8758 size = max_size;
8759 }
8760 for (size_t i = 0; i < size; i++) {
8761 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
8762 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
8763 j+=2;
8764 }
8765}
8766
8767/*===========================================================================
8768 * FUNCTION : makeOverridesList
8769 *
8770 * DESCRIPTION: make a list of scene mode overrides
8771 *
8772 * PARAMETERS :
8773 *
8774 *
8775 *==========================================================================*/
8776void QCamera3HardwareInterface::makeOverridesList(
8777 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
8778 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
8779{
8780 /*daemon will give a list of overrides for all scene modes.
8781 However we should send the fwk only the overrides for the scene modes
8782 supported by the framework*/
8783 size_t j = 0;
8784 if (size > max_size) {
8785 size = max_size;
8786 }
8787 size_t focus_count = CAM_FOCUS_MODE_MAX;
8788 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
8789 focus_count);
8790 for (size_t i = 0; i < size; i++) {
8791 bool supt = false;
8792 size_t index = supported_indexes[i];
8793 overridesList[j] = gCamCapability[camera_id]->flash_available ?
8794 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
8795 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8796 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8797 overridesTable[index].awb_mode);
8798 if (NAME_NOT_FOUND != val) {
8799 overridesList[j+1] = (uint8_t)val;
8800 }
8801 uint8_t focus_override = overridesTable[index].af_mode;
8802 for (size_t k = 0; k < focus_count; k++) {
8803 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
8804 supt = true;
8805 break;
8806 }
8807 }
8808 if (supt) {
8809 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8810 focus_override);
8811 if (NAME_NOT_FOUND != val) {
8812 overridesList[j+2] = (uint8_t)val;
8813 }
8814 } else {
8815 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
8816 }
8817 j+=3;
8818 }
8819}
8820
8821/*===========================================================================
8822 * FUNCTION : filterJpegSizes
8823 *
8824 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
8825 * could be downscaled to
8826 *
8827 * PARAMETERS :
8828 *
8829 * RETURN : length of jpegSizes array
8830 *==========================================================================*/
8831
8832size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
8833 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
8834 uint8_t downscale_factor)
8835{
8836 if (0 == downscale_factor) {
8837 downscale_factor = 1;
8838 }
8839
8840 int32_t min_width = active_array_size.width / downscale_factor;
8841 int32_t min_height = active_array_size.height / downscale_factor;
8842 size_t jpegSizesCnt = 0;
8843 if (processedSizesCnt > maxCount) {
8844 processedSizesCnt = maxCount;
8845 }
8846 for (size_t i = 0; i < processedSizesCnt; i+=2) {
8847 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
8848 jpegSizes[jpegSizesCnt] = processedSizes[i];
8849 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
8850 jpegSizesCnt += 2;
8851 }
8852 }
8853 return jpegSizesCnt;
8854}
8855
8856/*===========================================================================
8857 * FUNCTION : computeNoiseModelEntryS
8858 *
8859 * DESCRIPTION: function to map a given sensitivity to the S noise
8860 * model parameters in the DNG noise model.
8861 *
8862 * PARAMETERS : sens : the sensor sensitivity
8863 *
8864 ** RETURN : S (sensor amplification) noise
8865 *
8866 *==========================================================================*/
8867double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
8868 double s = gCamCapability[mCameraId]->gradient_S * sens +
8869 gCamCapability[mCameraId]->offset_S;
8870 return ((s < 0.0) ? 0.0 : s);
8871}
8872
8873/*===========================================================================
8874 * FUNCTION : computeNoiseModelEntryO
8875 *
8876 * DESCRIPTION: function to map a given sensitivity to the O noise
8877 * model parameters in the DNG noise model.
8878 *
8879 * PARAMETERS : sens : the sensor sensitivity
8880 *
8881 ** RETURN : O (sensor readout) noise
8882 *
8883 *==========================================================================*/
8884double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8885 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8886 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8887 1.0 : (1.0 * sens / max_analog_sens);
8888 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8889 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8890 return ((o < 0.0) ? 0.0 : o);
8891}
8892
8893/*===========================================================================
8894 * FUNCTION : getSensorSensitivity
8895 *
8896 * DESCRIPTION: convert iso_mode to an integer value
8897 *
8898 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8899 *
8900 ** RETURN : sensitivity supported by sensor
8901 *
8902 *==========================================================================*/
8903int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8904{
8905 int32_t sensitivity;
8906
8907 switch (iso_mode) {
8908 case CAM_ISO_MODE_100:
8909 sensitivity = 100;
8910 break;
8911 case CAM_ISO_MODE_200:
8912 sensitivity = 200;
8913 break;
8914 case CAM_ISO_MODE_400:
8915 sensitivity = 400;
8916 break;
8917 case CAM_ISO_MODE_800:
8918 sensitivity = 800;
8919 break;
8920 case CAM_ISO_MODE_1600:
8921 sensitivity = 1600;
8922 break;
8923 default:
8924 sensitivity = -1;
8925 break;
8926 }
8927 return sensitivity;
8928}
8929
8930/*===========================================================================
8931 * FUNCTION : getCamInfo
8932 *
8933 * DESCRIPTION: query camera capabilities
8934 *
8935 * PARAMETERS :
8936 * @cameraId : camera Id
8937 * @info : camera info struct to be filled in with camera capabilities
8938 *
8939 * RETURN : int type of status
8940 * NO_ERROR -- success
8941 * none-zero failure code
8942 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock guards the lazily-initialized per-camera capability and
    // static-metadata caches shared by all HAL instances in this process.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Release the lock before every early return.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the backend sensor position (including aux sensors) onto the
    // two-valued framework facing enum.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: report failure via rc but still fill in the
        // remaining info fields below.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9017
9018/*===========================================================================
9019 * FUNCTION : translateCapabilityToMetadata
9020 *
9021 * DESCRIPTION: translate the capability into camera_metadata_t
9022 *
9023 * PARAMETERS : type of the request
9024 *
9025 *
9026 * RETURN : success: camera_metadata_t*
9027 * failure: NULL
9028 *
9029 *==========================================================================*/
9030camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9031{
9032 if (mDefaultMetadata[type] != NULL) {
9033 return mDefaultMetadata[type];
9034 }
9035 //first time we are handling this request
9036 //fill up the metadata structure using the wrapper class
9037 CameraMetadata settings;
9038 //translate from cam_capability_t to camera_metadata_tag_t
9039 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9040 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9041 int32_t defaultRequestID = 0;
9042 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9043
9044 /* OIS disable */
9045 char ois_prop[PROPERTY_VALUE_MAX];
9046 memset(ois_prop, 0, sizeof(ois_prop));
9047 property_get("persist.camera.ois.disable", ois_prop, "0");
9048 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9049
9050 /* Force video to use OIS */
9051 char videoOisProp[PROPERTY_VALUE_MAX];
9052 memset(videoOisProp, 0, sizeof(videoOisProp));
9053 property_get("persist.camera.ois.video", videoOisProp, "1");
9054 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009055
9056 // Hybrid AE enable/disable
9057 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9058 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9059 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9060 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9061
Thierry Strudel3d639192016-09-09 11:52:26 -07009062 uint8_t controlIntent = 0;
9063 uint8_t focusMode;
9064 uint8_t vsMode;
9065 uint8_t optStabMode;
9066 uint8_t cacMode;
9067 uint8_t edge_mode;
9068 uint8_t noise_red_mode;
9069 uint8_t tonemap_mode;
9070 bool highQualityModeEntryAvailable = FALSE;
9071 bool fastModeEntryAvailable = FALSE;
9072 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9073 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9074 switch (type) {
9075 case CAMERA3_TEMPLATE_PREVIEW:
9076 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9077 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9078 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9079 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9080 edge_mode = ANDROID_EDGE_MODE_FAST;
9081 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9082 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9083 break;
9084 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9085 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9086 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9087 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9088 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9089 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9090 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9091 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9092 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9093 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9094 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9095 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9096 highQualityModeEntryAvailable = TRUE;
9097 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9098 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9099 fastModeEntryAvailable = TRUE;
9100 }
9101 }
9102 if (highQualityModeEntryAvailable) {
9103 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9104 } else if (fastModeEntryAvailable) {
9105 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9106 }
9107 break;
9108 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9109 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9110 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9111 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009112 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9113 edge_mode = ANDROID_EDGE_MODE_FAST;
9114 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9115 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9116 if (forceVideoOis)
9117 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9118 break;
9119 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9120 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9121 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9122 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009123 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9124 edge_mode = ANDROID_EDGE_MODE_FAST;
9125 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9126 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9127 if (forceVideoOis)
9128 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9129 break;
9130 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9131 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9132 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9133 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9134 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9135 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9136 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9137 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9138 break;
9139 case CAMERA3_TEMPLATE_MANUAL:
9140 edge_mode = ANDROID_EDGE_MODE_FAST;
9141 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9142 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9143 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9144 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9145 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9146 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9147 break;
9148 default:
9149 edge_mode = ANDROID_EDGE_MODE_FAST;
9150 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9151 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9152 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9153 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9154 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9155 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9156 break;
9157 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009158 // Set CAC to OFF if underlying device doesn't support
9159 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9160 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9161 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009162 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9163 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9164 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9165 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9166 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9167 }
9168 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9169
9170 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9171 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9172 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9173 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9174 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9175 || ois_disable)
9176 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9177 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9178
9179 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9180 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9181
9182 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9183 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9184
9185 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9186 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9187
9188 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9189 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9190
9191 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9192 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9193
9194 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9195 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9196
9197 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9198 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9199
9200 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9201 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9202
9203 /*flash*/
9204 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9205 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9206
9207 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9208 settings.update(ANDROID_FLASH_FIRING_POWER,
9209 &flashFiringLevel, 1);
9210
9211 /* lens */
9212 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9213 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9214
9215 if (gCamCapability[mCameraId]->filter_densities_count) {
9216 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9217 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9218 gCamCapability[mCameraId]->filter_densities_count);
9219 }
9220
9221 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9222 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9223
9224 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9225 float default_focus_distance = 0;
9226 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9227 }
9228
9229 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9230 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9231
9232 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9233 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9234
9235 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9236 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9237
9238 /* face detection (default to OFF) */
9239 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9240 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9241
9242 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9243 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9244
9245 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9246 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9247
9248 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9249 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9250
9251 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9252 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9253
9254 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9255 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9256
9257 /* Exposure time(Update the Min Exposure Time)*/
9258 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9259 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9260
9261 /* frame duration */
9262 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9263 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9264
9265 /* sensitivity */
9266 static const int32_t default_sensitivity = 100;
9267 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009268#ifndef USE_HAL_3_3
9269 static const int32_t default_isp_sensitivity =
9270 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9271 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9272#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009273
9274 /*edge mode*/
9275 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9276
9277 /*noise reduction mode*/
9278 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9279
9280 /*color correction mode*/
9281 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9282 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9283
9284 /*transform matrix mode*/
9285 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9286
9287 int32_t scaler_crop_region[4];
9288 scaler_crop_region[0] = 0;
9289 scaler_crop_region[1] = 0;
9290 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9291 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9292 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9293
9294 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9295 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9296
9297 /*focus distance*/
9298 float focus_distance = 0.0;
9299 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9300
9301 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009302 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009303 float max_range = 0.0;
9304 float max_fixed_fps = 0.0;
9305 int32_t fps_range[2] = {0, 0};
9306 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9307 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009308 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9309 TEMPLATE_MAX_PREVIEW_FPS) {
9310 continue;
9311 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009312 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9313 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9314 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9315 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9316 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9317 if (range > max_range) {
9318 fps_range[0] =
9319 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9320 fps_range[1] =
9321 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9322 max_range = range;
9323 }
9324 } else {
9325 if (range < 0.01 && max_fixed_fps <
9326 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9327 fps_range[0] =
9328 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9329 fps_range[1] =
9330 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9331 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9332 }
9333 }
9334 }
9335 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9336
9337 /*precapture trigger*/
9338 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9339 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9340
9341 /*af trigger*/
9342 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9343 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9344
9345 /* ae & af regions */
9346 int32_t active_region[] = {
9347 gCamCapability[mCameraId]->active_array_size.left,
9348 gCamCapability[mCameraId]->active_array_size.top,
9349 gCamCapability[mCameraId]->active_array_size.left +
9350 gCamCapability[mCameraId]->active_array_size.width,
9351 gCamCapability[mCameraId]->active_array_size.top +
9352 gCamCapability[mCameraId]->active_array_size.height,
9353 0};
9354 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9355 sizeof(active_region) / sizeof(active_region[0]));
9356 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9357 sizeof(active_region) / sizeof(active_region[0]));
9358
9359 /* black level lock */
9360 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9361 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9362
9363 /* lens shading map mode */
9364 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9365 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9366 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9367 }
9368 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9369
9370 //special defaults for manual template
9371 if (type == CAMERA3_TEMPLATE_MANUAL) {
9372 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9373 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9374
9375 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9376 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9377
9378 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9379 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9380
9381 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9382 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9383
9384 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9385 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9386
9387 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9388 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9389 }
9390
9391
9392 /* TNR
9393 * We'll use this location to determine which modes TNR will be set.
9394 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9395 * This is not to be confused with linking on a per stream basis that decision
9396 * is still on per-session basis and will be handled as part of config stream
9397 */
9398 uint8_t tnr_enable = 0;
9399
9400 if (m_bTnrPreview || m_bTnrVideo) {
9401
9402 switch (type) {
9403 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9404 tnr_enable = 1;
9405 break;
9406
9407 default:
9408 tnr_enable = 0;
9409 break;
9410 }
9411
9412 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9413 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9414 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9415
9416 LOGD("TNR:%d with process plate %d for template:%d",
9417 tnr_enable, tnr_process_type, type);
9418 }
9419
9420 //Update Link tags to default
9421 int32_t sync_type = CAM_TYPE_STANDALONE;
9422 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9423
9424 int32_t is_main = 0; //this doesn't matter as app should overwrite
9425 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9426
9427 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
9428
9429 /* CDS default */
9430 char prop[PROPERTY_VALUE_MAX];
9431 memset(prop, 0, sizeof(prop));
9432 property_get("persist.camera.CDS", prop, "Auto");
9433 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9434 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9435 if (CAM_CDS_MODE_MAX == cds_mode) {
9436 cds_mode = CAM_CDS_MODE_AUTO;
9437 }
9438
9439 /* Disabling CDS in templates which have TNR enabled*/
9440 if (tnr_enable)
9441 cds_mode = CAM_CDS_MODE_OFF;
9442
9443 int32_t mode = cds_mode;
9444 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07009445
9446 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
9447 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
9448
9449 /* IR Mode Default Off */
9450 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
9451 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
9452
Thierry Strudel269c81a2016-10-12 12:13:59 -07009453 /* Manual Convergence AEC Speed is disabled by default*/
9454 float default_aec_speed = 0;
9455 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
9456
9457 /* Manual Convergence AWB Speed is disabled by default*/
9458 float default_awb_speed = 0;
9459 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
9460
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009461 // Set instant AEC to normal convergence by default
9462 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
9463 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
9464
Shuzhen Wang19463d72016-03-08 11:09:52 -08009465 /* hybrid ae */
9466 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9467
Thierry Strudel3d639192016-09-09 11:52:26 -07009468 mDefaultMetadata[type] = settings.release();
9469
9470 return mDefaultMetadata[type];
9471}
9472
/*===========================================================================
 * FUNCTION   : setFrameParameters
 *
 * DESCRIPTION: set parameters per frame as requested in the metadata from
 *              framework. Resets mParameters and batches the HAL version,
 *              frame number, requested stream IDs and (optionally) the
 *              translated framework settings into it.
 *
 * PARAMETERS :
 *   @request      : request that needs to be serviced
 *   @streamsArray : Stream ID of all the requested streams
 *   @blob_request : Whether this request is a blob (JPEG) request or not
 *   @snapshotStreamId : stream ID of the snapshot stream, forwarded to
 *                       translateToHalMetadata
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error code returned by
 *                       translateToHalMetadata
 *==========================================================================*/
int QCamera3HardwareInterface::setFrameParameters(
                    camera3_capture_request_t *request,
                    cam_stream_ID_t streamsArray,
                    int blob_request,
                    uint32_t snapshotStreamId)
{
    /*translate from camera_metadata_t type to parm_type_t*/
    int rc = 0;
    int32_t hal_version = CAM_HAL_V3;

    // Start from a clean parameter batch for every frame.
    clear_metadata_buffer(mParameters);
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
        LOGE("Failed to set hal version in the parameters");
        return BAD_VALUE;
    }

    /*we need to update the frame number in the parameters*/
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
            request->frame_number)) {
        LOGE("Failed to set the frame number in the parameters");
        return BAD_VALUE;
    }

    /* Update stream id of all the requested buffers */
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
        LOGE("Failed to set stream type mask in the parameters");
        return BAD_VALUE;
    }

    // One-shot request to the backend to re-read the debug-level property;
    // the flag is armed elsewhere and consumed here.
    if (mUpdateDebugLevel) {
        uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelevant. On
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
                dummyDebugLevel)) {
            LOGE("Failed to set UPDATE_DEBUG_LEVEL");
            return BAD_VALUE;
        }
        mUpdateDebugLevel = false;
    }

    // Per the camera3 contract, settings may be NULL for repeating requests;
    // in that case the previously applied parameters remain in effect.
    if(request->settings != NULL){
        rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
        if (blob_request)
            // Snapshot path: remember the parameters of this blob request so
            // reprocessing can refer back to them.
            memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
    }

    return rc;
}
9536
9537/*===========================================================================
9538 * FUNCTION : setReprocParameters
9539 *
9540 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9541 * return it.
9542 *
9543 * PARAMETERS :
9544 * @request : request that needs to be serviced
9545 *
9546 * RETURN : success: NO_ERROR
9547 * failure:
9548 *==========================================================================*/
9549int32_t QCamera3HardwareInterface::setReprocParameters(
9550 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9551 uint32_t snapshotStreamId)
9552{
9553 /*translate from camera_metadata_t type to parm_type_t*/
9554 int rc = 0;
9555
9556 if (NULL == request->settings){
9557 LOGE("Reprocess settings cannot be NULL");
9558 return BAD_VALUE;
9559 }
9560
9561 if (NULL == reprocParam) {
9562 LOGE("Invalid reprocessing metadata buffer");
9563 return BAD_VALUE;
9564 }
9565 clear_metadata_buffer(reprocParam);
9566
9567 /*we need to update the frame number in the parameters*/
9568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9569 request->frame_number)) {
9570 LOGE("Failed to set the frame number in the parameters");
9571 return BAD_VALUE;
9572 }
9573
9574 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9575 if (rc < 0) {
9576 LOGE("Failed to translate reproc request");
9577 return rc;
9578 }
9579
9580 CameraMetadata frame_settings;
9581 frame_settings = request->settings;
9582 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9583 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9584 int32_t *crop_count =
9585 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9586 int32_t *crop_data =
9587 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9588 int32_t *roi_map =
9589 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9590 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9591 cam_crop_data_t crop_meta;
9592 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9593 crop_meta.num_of_streams = 1;
9594 crop_meta.crop_info[0].crop.left = crop_data[0];
9595 crop_meta.crop_info[0].crop.top = crop_data[1];
9596 crop_meta.crop_info[0].crop.width = crop_data[2];
9597 crop_meta.crop_info[0].crop.height = crop_data[3];
9598
9599 crop_meta.crop_info[0].roi_map.left =
9600 roi_map[0];
9601 crop_meta.crop_info[0].roi_map.top =
9602 roi_map[1];
9603 crop_meta.crop_info[0].roi_map.width =
9604 roi_map[2];
9605 crop_meta.crop_info[0].roi_map.height =
9606 roi_map[3];
9607
9608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9609 rc = BAD_VALUE;
9610 }
9611 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9612 request->input_buffer->stream,
9613 crop_meta.crop_info[0].crop.left,
9614 crop_meta.crop_info[0].crop.top,
9615 crop_meta.crop_info[0].crop.width,
9616 crop_meta.crop_info[0].crop.height);
9617 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9618 request->input_buffer->stream,
9619 crop_meta.crop_info[0].roi_map.left,
9620 crop_meta.crop_info[0].roi_map.top,
9621 crop_meta.crop_info[0].roi_map.width,
9622 crop_meta.crop_info[0].roi_map.height);
9623 } else {
9624 LOGE("Invalid reprocess crop count %d!", *crop_count);
9625 }
9626 } else {
9627 LOGE("No crop data from matching output stream");
9628 }
9629
9630 /* These settings are not needed for regular requests so handle them specially for
9631 reprocess requests; information needed for EXIF tags */
9632 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9633 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9634 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9635 if (NAME_NOT_FOUND != val) {
9636 uint32_t flashMode = (uint32_t)val;
9637 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9638 rc = BAD_VALUE;
9639 }
9640 } else {
9641 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9642 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9643 }
9644 } else {
9645 LOGH("No flash mode in reprocess settings");
9646 }
9647
9648 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9649 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9651 rc = BAD_VALUE;
9652 }
9653 } else {
9654 LOGH("No flash state in reprocess settings");
9655 }
9656
9657 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
9658 uint8_t *reprocessFlags =
9659 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
9660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
9661 *reprocessFlags)) {
9662 rc = BAD_VALUE;
9663 }
9664 }
9665
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009666 // Add metadata which reprocess needs
9667 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
9668 cam_reprocess_info_t *repro_info =
9669 (cam_reprocess_info_t *)frame_settings.find
9670 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -07009671 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009672 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009673 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009674 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009675 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009676 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009677 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009678 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009679 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009680 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -07009681 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009682 repro_info->pipeline_flip);
9683 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
9684 repro_info->af_roi);
9685 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
9686 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -07009687 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
9688 CAM_INTF_PARM_ROTATION metadata then has been added in
9689 translateToHalMetadata. HAL need to keep this new rotation
9690 metadata. Otherwise, the old rotation info saved in the vendor tag
9691 would be used */
9692 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
9693 CAM_INTF_PARM_ROTATION, reprocParam) {
9694 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
9695 } else {
9696 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009697 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07009698 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009699 }
9700
9701 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
9702 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
9703 roi.width and roi.height would be the final JPEG size.
9704 For now, HAL only checks this for reprocess request */
9705 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
9706 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
9707 uint8_t *enable =
9708 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
9709 if (*enable == TRUE) {
9710 int32_t *crop_data =
9711 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
9712 cam_stream_crop_info_t crop_meta;
9713 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
9714 crop_meta.stream_id = 0;
9715 crop_meta.crop.left = crop_data[0];
9716 crop_meta.crop.top = crop_data[1];
9717 crop_meta.crop.width = crop_data[2];
9718 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009719 // The JPEG crop roi should match cpp output size
9720 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
9721 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
9722 crop_meta.roi_map.left = 0;
9723 crop_meta.roi_map.top = 0;
9724 crop_meta.roi_map.width = cpp_crop->crop.width;
9725 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07009726 }
9727 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
9728 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009729 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07009730 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009731 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
9732 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07009733 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009734 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
9735
9736 // Add JPEG scale information
9737 cam_dimension_t scale_dim;
9738 memset(&scale_dim, 0, sizeof(cam_dimension_t));
9739 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
9740 int32_t *roi =
9741 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
9742 scale_dim.width = roi[2];
9743 scale_dim.height = roi[3];
9744 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
9745 scale_dim);
9746 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
9747 scale_dim.width, scale_dim.height, mCameraId);
9748 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009749 }
9750 }
9751
9752 return rc;
9753}
9754
9755/*===========================================================================
9756 * FUNCTION : saveRequestSettings
9757 *
9758 * DESCRIPTION: Add any settings that might have changed to the request settings
9759 * and save the settings to be applied on the frame
9760 *
9761 * PARAMETERS :
9762 * @jpegMetadata : the extracted and/or modified jpeg metadata
9763 * @request : request with initial settings
9764 *
9765 * RETURN :
9766 * camera_metadata_t* : pointer to the saved request settings
9767 *==========================================================================*/
9768camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
9769 const CameraMetadata &jpegMetadata,
9770 camera3_capture_request_t *request)
9771{
9772 camera_metadata_t *resultMetadata;
9773 CameraMetadata camMetadata;
9774 camMetadata = request->settings;
9775
9776 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9777 int32_t thumbnail_size[2];
9778 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9779 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9780 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
9781 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
9782 }
9783
9784 if (request->input_buffer != NULL) {
9785 uint8_t reprocessFlags = 1;
9786 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
9787 (uint8_t*)&reprocessFlags,
9788 sizeof(reprocessFlags));
9789 }
9790
9791 resultMetadata = camMetadata.release();
9792 return resultMetadata;
9793}
9794
/*===========================================================================
 * FUNCTION   : setHalFpsRange
 *
 * DESCRIPTION: set FPS range parameter. In constrained HFR mode this also
 *              selects the HFR mode, computes the preview batch size and
 *              flags a sensor restart when the HFR fps changes mid-session.
 *
 * PARAMETERS :
 *   @settings    : Metadata from framework. NOTE(review): assumes
 *                  ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present — the
 *                  visible caller checks exists() before calling; confirm
 *                  all callers do.
 *   @hal_metadata: Metadata buffer receiving CAM_INTF_PARM_HFR and
 *                  CAM_INTF_PARM_FPS_RANGE
 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE
 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    fps_range.min_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Default: video fps range mirrors the AE target range; overridden below
    // for constrained high-speed sessions.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *       YES         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // HFR: lock min fps to max so the sensor streams at a fixed rate.
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames that preview runs at PREVIEW_FPS_FOR_HFR,
                // capped at the maximum supported batch size.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
9902
9903/*===========================================================================
9904 * FUNCTION : translateToHalMetadata
9905 *
9906 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9907 *
9908 *
9909 * PARAMETERS :
9910 * @request : request sent from framework
9911 *
9912 *
9913 * RETURN : success: NO_ERROR
9914 * failure:
9915 *==========================================================================*/
9916int QCamera3HardwareInterface::translateToHalMetadata
9917 (const camera3_capture_request_t *request,
9918 metadata_buffer_t *hal_metadata,
9919 uint32_t snapshotStreamId)
9920{
9921 int rc = 0;
9922 CameraMetadata frame_settings;
9923 frame_settings = request->settings;
9924
9925 /* Do not change the order of the following list unless you know what you are
9926 * doing.
9927 * The order is laid out in such a way that parameters in the front of the table
9928 * may be used to override the parameters later in the table. Examples are:
9929 * 1. META_MODE should precede AEC/AWB/AF MODE
9930 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
9931 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
9932 * 4. Any mode should precede it's corresponding settings
9933 */
9934 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9935 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9937 rc = BAD_VALUE;
9938 }
9939 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9940 if (rc != NO_ERROR) {
9941 LOGE("extractSceneMode failed");
9942 }
9943 }
9944
9945 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9946 uint8_t fwk_aeMode =
9947 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9948 uint8_t aeMode;
9949 int32_t redeye;
9950
9951 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9952 aeMode = CAM_AE_MODE_OFF;
9953 } else {
9954 aeMode = CAM_AE_MODE_ON;
9955 }
9956 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9957 redeye = 1;
9958 } else {
9959 redeye = 0;
9960 }
9961
9962 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9963 fwk_aeMode);
9964 if (NAME_NOT_FOUND != val) {
9965 int32_t flashMode = (int32_t)val;
9966 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9967 }
9968
9969 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9970 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9971 rc = BAD_VALUE;
9972 }
9973 }
9974
9975 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9976 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9977 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9978 fwk_whiteLevel);
9979 if (NAME_NOT_FOUND != val) {
9980 uint8_t whiteLevel = (uint8_t)val;
9981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9982 rc = BAD_VALUE;
9983 }
9984 }
9985 }
9986
9987 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9988 uint8_t fwk_cacMode =
9989 frame_settings.find(
9990 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9991 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9992 fwk_cacMode);
9993 if (NAME_NOT_FOUND != val) {
9994 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9995 bool entryAvailable = FALSE;
9996 // Check whether Frameworks set CAC mode is supported in device or not
9997 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9998 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9999 entryAvailable = TRUE;
10000 break;
10001 }
10002 }
10003 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10004 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10005 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10006 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10007 if (entryAvailable == FALSE) {
10008 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10009 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10010 } else {
10011 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10012 // High is not supported and so set the FAST as spec say's underlying
10013 // device implementation can be the same for both modes.
10014 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10015 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10016 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10017 // in order to avoid the fps drop due to high quality
10018 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10019 } else {
10020 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10021 }
10022 }
10023 }
10024 LOGD("Final cacMode is %d", cacMode);
10025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10026 rc = BAD_VALUE;
10027 }
10028 } else {
10029 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10030 }
10031 }
10032
10033 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10034 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10035 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10036 fwk_focusMode);
10037 if (NAME_NOT_FOUND != val) {
10038 uint8_t focusMode = (uint8_t)val;
10039 LOGD("set focus mode %d", focusMode);
10040 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10041 rc = BAD_VALUE;
10042 }
10043 }
10044 }
10045
10046 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10047 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10048 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10049 focalDistance)) {
10050 rc = BAD_VALUE;
10051 }
10052 }
10053
10054 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10055 uint8_t fwk_antibandingMode =
10056 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10057 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10058 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10059 if (NAME_NOT_FOUND != val) {
10060 uint32_t hal_antibandingMode = (uint32_t)val;
10061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10062 hal_antibandingMode)) {
10063 rc = BAD_VALUE;
10064 }
10065 }
10066 }
10067
10068 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10069 int32_t expCompensation = frame_settings.find(
10070 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10071 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10072 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10073 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10074 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010075 ALOGE("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10077 expCompensation)) {
10078 rc = BAD_VALUE;
10079 }
10080 }
10081
10082 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10083 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10085 rc = BAD_VALUE;
10086 }
10087 }
10088 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10089 rc = setHalFpsRange(frame_settings, hal_metadata);
10090 if (rc != NO_ERROR) {
10091 LOGE("setHalFpsRange failed");
10092 }
10093 }
10094
10095 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10096 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10097 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10098 rc = BAD_VALUE;
10099 }
10100 }
10101
10102 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10103 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10104 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10105 fwk_effectMode);
10106 if (NAME_NOT_FOUND != val) {
10107 uint8_t effectMode = (uint8_t)val;
10108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10109 rc = BAD_VALUE;
10110 }
10111 }
10112 }
10113
10114 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10115 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10117 colorCorrectMode)) {
10118 rc = BAD_VALUE;
10119 }
10120 }
10121
10122 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10123 cam_color_correct_gains_t colorCorrectGains;
10124 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10125 colorCorrectGains.gains[i] =
10126 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10127 }
10128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10129 colorCorrectGains)) {
10130 rc = BAD_VALUE;
10131 }
10132 }
10133
10134 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10135 cam_color_correct_matrix_t colorCorrectTransform;
10136 cam_rational_type_t transform_elem;
10137 size_t num = 0;
10138 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10139 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10140 transform_elem.numerator =
10141 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10142 transform_elem.denominator =
10143 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10144 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10145 num++;
10146 }
10147 }
10148 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10149 colorCorrectTransform)) {
10150 rc = BAD_VALUE;
10151 }
10152 }
10153
10154 cam_trigger_t aecTrigger;
10155 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10156 aecTrigger.trigger_id = -1;
10157 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10158 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10159 aecTrigger.trigger =
10160 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10161 aecTrigger.trigger_id =
10162 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10163 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10164 aecTrigger)) {
10165 rc = BAD_VALUE;
10166 }
10167 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10168 aecTrigger.trigger, aecTrigger.trigger_id);
10169 }
10170
10171 /*af_trigger must come with a trigger id*/
10172 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10173 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10174 cam_trigger_t af_trigger;
10175 af_trigger.trigger =
10176 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10177 af_trigger.trigger_id =
10178 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10179 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10180 rc = BAD_VALUE;
10181 }
10182 LOGD("AfTrigger: %d AfTriggerID: %d",
10183 af_trigger.trigger, af_trigger.trigger_id);
10184 }
10185
10186 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10187 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10189 rc = BAD_VALUE;
10190 }
10191 }
10192 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10193 cam_edge_application_t edge_application;
10194 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10195 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10196 edge_application.sharpness = 0;
10197 } else {
10198 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10199 }
10200 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10201 rc = BAD_VALUE;
10202 }
10203 }
10204
10205 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10206 int32_t respectFlashMode = 1;
10207 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10208 uint8_t fwk_aeMode =
10209 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10210 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10211 respectFlashMode = 0;
10212 LOGH("AE Mode controls flash, ignore android.flash.mode");
10213 }
10214 }
10215 if (respectFlashMode) {
10216 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10217 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10218 LOGH("flash mode after mapping %d", val);
10219 // To check: CAM_INTF_META_FLASH_MODE usage
10220 if (NAME_NOT_FOUND != val) {
10221 uint8_t flashMode = (uint8_t)val;
10222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10223 rc = BAD_VALUE;
10224 }
10225 }
10226 }
10227 }
10228
10229 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10230 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10231 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10232 rc = BAD_VALUE;
10233 }
10234 }
10235
10236 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10237 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10238 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10239 flashFiringTime)) {
10240 rc = BAD_VALUE;
10241 }
10242 }
10243
10244 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10245 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10247 hotPixelMode)) {
10248 rc = BAD_VALUE;
10249 }
10250 }
10251
10252 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10253 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10254 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10255 lensAperture)) {
10256 rc = BAD_VALUE;
10257 }
10258 }
10259
10260 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10261 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10262 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10263 filterDensity)) {
10264 rc = BAD_VALUE;
10265 }
10266 }
10267
10268 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10269 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10270 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10271 focalLength)) {
10272 rc = BAD_VALUE;
10273 }
10274 }
10275
10276 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10277 uint8_t optStabMode =
10278 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10280 optStabMode)) {
10281 rc = BAD_VALUE;
10282 }
10283 }
10284
10285 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10286 uint8_t videoStabMode =
10287 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10288 LOGD("videoStabMode from APP = %d", videoStabMode);
10289 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10290 videoStabMode)) {
10291 rc = BAD_VALUE;
10292 }
10293 }
10294
10295
10296 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10297 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10298 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10299 noiseRedMode)) {
10300 rc = BAD_VALUE;
10301 }
10302 }
10303
10304 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10305 float reprocessEffectiveExposureFactor =
10306 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10307 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10308 reprocessEffectiveExposureFactor)) {
10309 rc = BAD_VALUE;
10310 }
10311 }
10312
10313 cam_crop_region_t scalerCropRegion;
10314 bool scalerCropSet = false;
10315 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10316 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10317 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10318 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10319 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10320
10321 // Map coordinate system from active array to sensor output.
10322 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10323 scalerCropRegion.width, scalerCropRegion.height);
10324
10325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10326 scalerCropRegion)) {
10327 rc = BAD_VALUE;
10328 }
10329 scalerCropSet = true;
10330 }
10331
10332 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10333 int64_t sensorExpTime =
10334 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10335 LOGD("setting sensorExpTime %lld", sensorExpTime);
10336 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10337 sensorExpTime)) {
10338 rc = BAD_VALUE;
10339 }
10340 }
10341
10342 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10343 int64_t sensorFrameDuration =
10344 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10345 int64_t minFrameDuration = getMinFrameDuration(request);
10346 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10347 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10348 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10349 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10351 sensorFrameDuration)) {
10352 rc = BAD_VALUE;
10353 }
10354 }
10355
10356 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10357 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10358 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10359 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10360 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10361 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10362 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10364 sensorSensitivity)) {
10365 rc = BAD_VALUE;
10366 }
10367 }
10368
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010369#ifndef USE_HAL_3_3
10370 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10371 int32_t ispSensitivity =
10372 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10373 if (ispSensitivity <
10374 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10375 ispSensitivity =
10376 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10377 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10378 }
10379 if (ispSensitivity >
10380 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10381 ispSensitivity =
10382 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10383 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10384 }
10385 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10386 ispSensitivity)) {
10387 rc = BAD_VALUE;
10388 }
10389 }
10390#endif
10391
Thierry Strudel3d639192016-09-09 11:52:26 -070010392 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10393 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10394 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10395 rc = BAD_VALUE;
10396 }
10397 }
10398
10399 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10400 uint8_t fwk_facedetectMode =
10401 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10402
10403 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10404 fwk_facedetectMode);
10405
10406 if (NAME_NOT_FOUND != val) {
10407 uint8_t facedetectMode = (uint8_t)val;
10408 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10409 facedetectMode)) {
10410 rc = BAD_VALUE;
10411 }
10412 }
10413 }
10414
10415 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10416 uint8_t histogramMode =
10417 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10419 histogramMode)) {
10420 rc = BAD_VALUE;
10421 }
10422 }
10423
10424 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10425 uint8_t sharpnessMapMode =
10426 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10427 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10428 sharpnessMapMode)) {
10429 rc = BAD_VALUE;
10430 }
10431 }
10432
10433 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10434 uint8_t tonemapMode =
10435 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10436 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10437 rc = BAD_VALUE;
10438 }
10439 }
10440 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10441 /*All tonemap channels will have the same number of points*/
10442 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10443 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10444 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10445 cam_rgb_tonemap_curves tonemapCurves;
10446 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10447 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10448 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10449 tonemapCurves.tonemap_points_cnt,
10450 CAM_MAX_TONEMAP_CURVE_SIZE);
10451 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10452 }
10453
10454 /* ch0 = G*/
10455 size_t point = 0;
10456 cam_tonemap_curve_t tonemapCurveGreen;
10457 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10458 for (size_t j = 0; j < 2; j++) {
10459 tonemapCurveGreen.tonemap_points[i][j] =
10460 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10461 point++;
10462 }
10463 }
10464 tonemapCurves.curves[0] = tonemapCurveGreen;
10465
10466 /* ch 1 = B */
10467 point = 0;
10468 cam_tonemap_curve_t tonemapCurveBlue;
10469 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10470 for (size_t j = 0; j < 2; j++) {
10471 tonemapCurveBlue.tonemap_points[i][j] =
10472 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10473 point++;
10474 }
10475 }
10476 tonemapCurves.curves[1] = tonemapCurveBlue;
10477
10478 /* ch 2 = R */
10479 point = 0;
10480 cam_tonemap_curve_t tonemapCurveRed;
10481 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10482 for (size_t j = 0; j < 2; j++) {
10483 tonemapCurveRed.tonemap_points[i][j] =
10484 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10485 point++;
10486 }
10487 }
10488 tonemapCurves.curves[2] = tonemapCurveRed;
10489
10490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10491 tonemapCurves)) {
10492 rc = BAD_VALUE;
10493 }
10494 }
10495
10496 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10497 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10499 captureIntent)) {
10500 rc = BAD_VALUE;
10501 }
10502 }
10503
10504 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10505 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10507 blackLevelLock)) {
10508 rc = BAD_VALUE;
10509 }
10510 }
10511
10512 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10513 uint8_t lensShadingMapMode =
10514 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10515 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10516 lensShadingMapMode)) {
10517 rc = BAD_VALUE;
10518 }
10519 }
10520
10521 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10522 cam_area_t roi;
10523 bool reset = true;
10524 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10525
10526 // Map coordinate system from active array to sensor output.
10527 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10528 roi.rect.height);
10529
10530 if (scalerCropSet) {
10531 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10532 }
10533 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10534 rc = BAD_VALUE;
10535 }
10536 }
10537
10538 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10539 cam_area_t roi;
10540 bool reset = true;
10541 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10542
10543 // Map coordinate system from active array to sensor output.
10544 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10545 roi.rect.height);
10546
10547 if (scalerCropSet) {
10548 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10549 }
10550 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10551 rc = BAD_VALUE;
10552 }
10553 }
10554
10555 // CDS for non-HFR non-video mode
10556 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10557 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10558 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10559 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10560 LOGE("Invalid CDS mode %d!", *fwk_cds);
10561 } else {
10562 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10563 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10564 rc = BAD_VALUE;
10565 }
10566 }
10567 }
10568
Thierry Strudel04e026f2016-10-10 11:27:36 -070010569 // Video HDR
10570 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
10571 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
10572 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
10573 rc = setVideoHdrMode(mParameters, vhdr);
10574 if (rc != NO_ERROR) {
10575 LOGE("setVideoHDR is failed");
10576 }
10577 }
10578
10579 //IR
10580 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
10581 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
10582 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
10583 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
10584 LOGE("Invalid IR mode %d!", fwk_ir);
10585 } else {
10586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10587 CAM_INTF_META_IR_MODE, fwk_ir)) {
10588 rc = BAD_VALUE;
10589 }
10590 }
10591 }
10592
Thierry Strudel269c81a2016-10-12 12:13:59 -070010593 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
10594 float aec_speed;
10595 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
10596 LOGD("AEC Speed :%f", aec_speed);
10597 if ( aec_speed < 0 ) {
10598 LOGE("Invalid AEC mode %f!", aec_speed);
10599 } else {
10600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
10601 aec_speed)) {
10602 rc = BAD_VALUE;
10603 }
10604 }
10605 }
10606
10607 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
10608 float awb_speed;
10609 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
10610 LOGD("AWB Speed :%f", awb_speed);
10611 if ( awb_speed < 0 ) {
10612 LOGE("Invalid AWB mode %f!", awb_speed);
10613 } else {
10614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
10615 awb_speed)) {
10616 rc = BAD_VALUE;
10617 }
10618 }
10619 }
10620
Thierry Strudel3d639192016-09-09 11:52:26 -070010621 // TNR
10622 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10623 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10624 uint8_t b_TnrRequested = 0;
10625 cam_denoise_param_t tnr;
10626 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10627 tnr.process_plates =
10628 (cam_denoise_process_type_t)frame_settings.find(
10629 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10630 b_TnrRequested = tnr.denoise_enable;
10631 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10632 rc = BAD_VALUE;
10633 }
10634 }
10635
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010636 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10637 int32_t* exposure_metering_mode =
10638 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10639 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10640 *exposure_metering_mode)) {
10641 rc = BAD_VALUE;
10642 }
10643 }
10644
Thierry Strudel3d639192016-09-09 11:52:26 -070010645 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10646 int32_t fwk_testPatternMode =
10647 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10648 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10649 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10650
10651 if (NAME_NOT_FOUND != testPatternMode) {
10652 cam_test_pattern_data_t testPatternData;
10653 memset(&testPatternData, 0, sizeof(testPatternData));
10654 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10655 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10656 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10657 int32_t *fwk_testPatternData =
10658 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10659 testPatternData.r = fwk_testPatternData[0];
10660 testPatternData.b = fwk_testPatternData[3];
10661 switch (gCamCapability[mCameraId]->color_arrangement) {
10662 case CAM_FILTER_ARRANGEMENT_RGGB:
10663 case CAM_FILTER_ARRANGEMENT_GRBG:
10664 testPatternData.gr = fwk_testPatternData[1];
10665 testPatternData.gb = fwk_testPatternData[2];
10666 break;
10667 case CAM_FILTER_ARRANGEMENT_GBRG:
10668 case CAM_FILTER_ARRANGEMENT_BGGR:
10669 testPatternData.gr = fwk_testPatternData[2];
10670 testPatternData.gb = fwk_testPatternData[1];
10671 break;
10672 default:
10673 LOGE("color arrangement %d is not supported",
10674 gCamCapability[mCameraId]->color_arrangement);
10675 break;
10676 }
10677 }
10678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
10679 testPatternData)) {
10680 rc = BAD_VALUE;
10681 }
10682 } else {
10683 LOGE("Invalid framework sensor test pattern mode %d",
10684 fwk_testPatternMode);
10685 }
10686 }
10687
10688 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
10689 size_t count = 0;
10690 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
10691 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
10692 gps_coords.data.d, gps_coords.count, count);
10693 if (gps_coords.count != count) {
10694 rc = BAD_VALUE;
10695 }
10696 }
10697
10698 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
10699 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
10700 size_t count = 0;
10701 const char *gps_methods_src = (const char *)
10702 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
10703 memset(gps_methods, '\0', sizeof(gps_methods));
10704 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
10705 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
10706 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
10707 if (GPS_PROCESSING_METHOD_SIZE != count) {
10708 rc = BAD_VALUE;
10709 }
10710 }
10711
10712 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
10713 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
10714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
10715 gps_timestamp)) {
10716 rc = BAD_VALUE;
10717 }
10718 }
10719
10720 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
10721 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
10722 cam_rotation_info_t rotation_info;
10723 if (orientation == 0) {
10724 rotation_info.rotation = ROTATE_0;
10725 } else if (orientation == 90) {
10726 rotation_info.rotation = ROTATE_90;
10727 } else if (orientation == 180) {
10728 rotation_info.rotation = ROTATE_180;
10729 } else if (orientation == 270) {
10730 rotation_info.rotation = ROTATE_270;
10731 }
10732 rotation_info.streamId = snapshotStreamId;
10733 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
10734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
10735 rc = BAD_VALUE;
10736 }
10737 }
10738
10739 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
10740 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
10741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
10742 rc = BAD_VALUE;
10743 }
10744 }
10745
10746 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
10747 uint32_t thumb_quality = (uint32_t)
10748 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
10749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
10750 thumb_quality)) {
10751 rc = BAD_VALUE;
10752 }
10753 }
10754
10755 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10756 cam_dimension_t dim;
10757 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10758 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
10760 rc = BAD_VALUE;
10761 }
10762 }
10763
10764 // Internal metadata
10765 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
10766 size_t count = 0;
10767 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
10768 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
10769 privatedata.data.i32, privatedata.count, count);
10770 if (privatedata.count != count) {
10771 rc = BAD_VALUE;
10772 }
10773 }
10774
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010775 // ISO/Exposure Priority
10776 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
10777 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
10778 cam_priority_mode_t mode =
10779 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
10780 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
10781 cam_intf_parm_manual_3a_t use_iso_exp_pty;
10782 use_iso_exp_pty.previewOnly = FALSE;
10783 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
10784 use_iso_exp_pty.value = *ptr;
10785
10786 if(CAM_ISO_PRIORITY == mode) {
10787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
10788 use_iso_exp_pty)) {
10789 rc = BAD_VALUE;
10790 }
10791 }
10792 else {
10793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
10794 use_iso_exp_pty)) {
10795 rc = BAD_VALUE;
10796 }
10797 }
10798 }
10799 }
10800
10801 // Saturation
10802 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
10803 int32_t* use_saturation =
10804 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
10805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
10806 rc = BAD_VALUE;
10807 }
10808 }
10809
Thierry Strudel3d639192016-09-09 11:52:26 -070010810 // EV step
10811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
10812 gCamCapability[mCameraId]->exp_compensation_step)) {
10813 rc = BAD_VALUE;
10814 }
10815
10816 // CDS info
10817 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
10818 cam_cds_data_t *cdsData = (cam_cds_data_t *)
10819 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
10820
10821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10822 CAM_INTF_META_CDS_DATA, *cdsData)) {
10823 rc = BAD_VALUE;
10824 }
10825 }
10826
Shuzhen Wang19463d72016-03-08 11:09:52 -080010827 // Hybrid AE
10828 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
10829 uint8_t *hybrid_ae = (uint8_t *)
10830 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
10831
10832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10833 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
10834 rc = BAD_VALUE;
10835 }
10836 }
10837
Thierry Strudel3d639192016-09-09 11:52:26 -070010838 return rc;
10839}
10840
10841/*===========================================================================
10842 * FUNCTION : captureResultCb
10843 *
10844 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
10845 *
10846 * PARAMETERS :
10847 * @frame : frame information from mm-camera-interface
10848 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
10849 * @userdata: userdata
10850 *
10851 * RETURN : NONE
10852 *==========================================================================*/
10853void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
10854 camera3_stream_buffer_t *buffer,
10855 uint32_t frame_number, bool isInputBuffer, void *userdata)
10856{
10857 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10858 if (hw == NULL) {
10859 LOGE("Invalid hw %p", hw);
10860 return;
10861 }
10862
10863 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
10864 return;
10865}
10866
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010867/*===========================================================================
10868 * FUNCTION : setBufferErrorStatus
10869 *
10870 * DESCRIPTION: Callback handler for channels to report any buffer errors
10871 *
10872 * PARAMETERS :
10873 * @ch : Channel on which buffer error is reported from
10874 * @frame_number : frame number on which buffer error is reported on
10875 * @buffer_status : buffer error status
10876 * @userdata: userdata
10877 *
10878 * RETURN : NONE
10879 *==========================================================================*/
10880void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10881 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
10882{
10883 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
10884 if (hw == NULL) {
10885 LOGE("Invalid hw %p", hw);
10886 return;
10887 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010888
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010889 hw->setBufferErrorStatus(ch, frame_number, err);
10890 return;
10891}
10892
10893void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
10894 uint32_t frameNumber, camera3_buffer_status_t err)
10895{
10896 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
10897 pthread_mutex_lock(&mMutex);
10898
10899 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
10900 if (req.frame_number != frameNumber)
10901 continue;
10902 for (auto& k : req.mPendingBufferList) {
10903 if(k.stream->priv == ch) {
10904 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
10905 }
10906 }
10907 }
10908
10909 pthread_mutex_unlock(&mMutex);
10910 return;
10911}
Thierry Strudel3d639192016-09-09 11:52:26 -070010912/*===========================================================================
10913 * FUNCTION : initialize
10914 *
10915 * DESCRIPTION: Pass framework callback pointers to HAL
10916 *
10917 * PARAMETERS :
10918 *
10919 *
10920 * RETURN : Success : 0
10921 * Failure: -ENODEV
10922 *==========================================================================*/
10923
10924int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
10925 const camera3_callback_ops_t *callback_ops)
10926{
10927 LOGD("E");
10928 QCamera3HardwareInterface *hw =
10929 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10930 if (!hw) {
10931 LOGE("NULL camera device");
10932 return -ENODEV;
10933 }
10934
10935 int rc = hw->initialize(callback_ops);
10936 LOGD("X");
10937 return rc;
10938}
10939
10940/*===========================================================================
10941 * FUNCTION : configure_streams
10942 *
10943 * DESCRIPTION:
10944 *
10945 * PARAMETERS :
10946 *
10947 *
10948 * RETURN : Success: 0
10949 * Failure: -EINVAL (if stream configuration is invalid)
10950 * -ENODEV (fatal error)
10951 *==========================================================================*/
10952
10953int QCamera3HardwareInterface::configure_streams(
10954 const struct camera3_device *device,
10955 camera3_stream_configuration_t *stream_list)
10956{
10957 LOGD("E");
10958 QCamera3HardwareInterface *hw =
10959 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10960 if (!hw) {
10961 LOGE("NULL camera device");
10962 return -ENODEV;
10963 }
10964 int rc = hw->configureStreams(stream_list);
10965 LOGD("X");
10966 return rc;
10967}
10968
10969/*===========================================================================
10970 * FUNCTION : construct_default_request_settings
10971 *
10972 * DESCRIPTION: Configure a settings buffer to meet the required use case
10973 *
10974 * PARAMETERS :
10975 *
10976 *
10977 * RETURN : Success: Return valid metadata
10978 * Failure: Return NULL
10979 *==========================================================================*/
10980const camera_metadata_t* QCamera3HardwareInterface::
10981 construct_default_request_settings(const struct camera3_device *device,
10982 int type)
10983{
10984
10985 LOGD("E");
10986 camera_metadata_t* fwk_metadata = NULL;
10987 QCamera3HardwareInterface *hw =
10988 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10989 if (!hw) {
10990 LOGE("NULL camera device");
10991 return NULL;
10992 }
10993
10994 fwk_metadata = hw->translateCapabilityToMetadata(type);
10995
10996 LOGD("X");
10997 return fwk_metadata;
10998}
10999
11000/*===========================================================================
11001 * FUNCTION : process_capture_request
11002 *
11003 * DESCRIPTION:
11004 *
11005 * PARAMETERS :
11006 *
11007 *
11008 * RETURN :
11009 *==========================================================================*/
11010int QCamera3HardwareInterface::process_capture_request(
11011 const struct camera3_device *device,
11012 camera3_capture_request_t *request)
11013{
11014 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011015 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011016 QCamera3HardwareInterface *hw =
11017 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11018 if (!hw) {
11019 LOGE("NULL camera device");
11020 return -EINVAL;
11021 }
11022
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011023 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011024 LOGD("X");
11025 return rc;
11026}
11027
11028/*===========================================================================
11029 * FUNCTION : dump
11030 *
11031 * DESCRIPTION:
11032 *
11033 * PARAMETERS :
11034 *
11035 *
11036 * RETURN :
11037 *==========================================================================*/
11038
11039void QCamera3HardwareInterface::dump(
11040 const struct camera3_device *device, int fd)
11041{
11042 /* Log level property is read when "adb shell dumpsys media.camera" is
11043 called so that the log level can be controlled without restarting
11044 the media server */
11045 getLogLevel();
11046
11047 LOGD("E");
11048 QCamera3HardwareInterface *hw =
11049 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11050 if (!hw) {
11051 LOGE("NULL camera device");
11052 return;
11053 }
11054
11055 hw->dump(fd);
11056 LOGD("X");
11057 return;
11058}
11059
11060/*===========================================================================
11061 * FUNCTION : flush
11062 *
11063 * DESCRIPTION:
11064 *
11065 * PARAMETERS :
11066 *
11067 *
11068 * RETURN :
11069 *==========================================================================*/
11070
int QCamera3HardwareInterface::flush(
        const struct camera3_device *device)
{
    // Static framework entry point for camera3_device_ops_t::flush.
    // Only forwards to the instance flush while the device is STARTED.
    int rc;
    LOGD("E");
    QCamera3HardwareInterface *hw =
        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
    if (!hw) {
        LOGE("NULL camera device");
        return -EINVAL;
    }

    pthread_mutex_lock(&hw->mMutex);
    // Validate current state under mMutex before doing any work.
    switch (hw->mState) {
        case STARTED:
            /* valid state */
            break;

        case ERROR:
            // Device already in error: unlock before running the error
            // handler (mMutex is deliberately not held across it), then
            // report the device as gone.
            pthread_mutex_unlock(&hw->mMutex);
            hw->handleCameraDeviceError();
            return -ENODEV;

        default:
            // Not streaming (or already stopped): nothing to flush;
            // success per the HAL3 flush contract.
            LOGI("Flush returned during state %d", hw->mState);
            pthread_mutex_unlock(&hw->mMutex);
            return 0;
    }
    pthread_mutex_unlock(&hw->mMutex);

    // Full flush; `true` requests that channels be restarted afterwards.
    rc = hw->flush(true /* restart channels */ );
    LOGD("X");
    return rc;
}
11106
11107/*===========================================================================
11108 * FUNCTION : close_camera_device
11109 *
11110 * DESCRIPTION:
11111 *
11112 * PARAMETERS :
11113 *
11114 *
11115 * RETURN :
11116 *==========================================================================*/
11117int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11118{
11119 int ret = NO_ERROR;
11120 QCamera3HardwareInterface *hw =
11121 reinterpret_cast<QCamera3HardwareInterface *>(
11122 reinterpret_cast<camera3_device_t *>(device)->priv);
11123 if (!hw) {
11124 LOGE("NULL camera device");
11125 return BAD_VALUE;
11126 }
11127
11128 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11129 delete hw;
11130 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011131 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011132 return ret;
11133}
11134
11135/*===========================================================================
11136 * FUNCTION : getWaveletDenoiseProcessPlate
11137 *
11138 * DESCRIPTION: query wavelet denoise process plate
11139 *
11140 * PARAMETERS : None
11141 *
 * RETURN     : WNR process plate value
11143 *==========================================================================*/
11144cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11145{
11146 char prop[PROPERTY_VALUE_MAX];
11147 memset(prop, 0, sizeof(prop));
11148 property_get("persist.denoise.process.plates", prop, "0");
11149 int processPlate = atoi(prop);
11150 switch(processPlate) {
11151 case 0:
11152 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11153 case 1:
11154 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11155 case 2:
11156 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11157 case 3:
11158 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11159 default:
11160 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11161 }
11162}
11163
11164
11165/*===========================================================================
11166 * FUNCTION : getTemporalDenoiseProcessPlate
11167 *
11168 * DESCRIPTION: query temporal denoise process plate
11169 *
11170 * PARAMETERS : None
11171 *
 * RETURN     : TNR process plate value
11173 *==========================================================================*/
11174cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11175{
11176 char prop[PROPERTY_VALUE_MAX];
11177 memset(prop, 0, sizeof(prop));
11178 property_get("persist.tnr.process.plates", prop, "0");
11179 int processPlate = atoi(prop);
11180 switch(processPlate) {
11181 case 0:
11182 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11183 case 1:
11184 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11185 case 2:
11186 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11187 case 3:
11188 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11189 default:
11190 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11191 }
11192}
11193
11194
11195/*===========================================================================
11196 * FUNCTION : extractSceneMode
11197 *
11198 * DESCRIPTION: Extract scene mode from frameworks set metadata
11199 *
11200 * PARAMETERS :
11201 * @frame_settings: CameraMetadata reference
 * @metaMode: ANDROID_CONTROL_MODE
11203 * @hal_metadata: hal metadata structure
11204 *
 * RETURN     : int32_t type of status, NO_ERROR on success
11206 *==========================================================================*/
int32_t QCamera3HardwareInterface::extractSceneMode(
        const CameraMetadata &frame_settings, uint8_t metaMode,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;

    if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
        camera_metadata_ro_entry entry =
                frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
        // Framework supplied no scene mode: nothing to extract, not an error.
        if (0 == entry.count)
            return rc;

        uint8_t fwk_sceneMode = entry.data.u8[0];

        // Translate the framework scene-mode enum into the HAL bestshot enum.
        int val = lookupHalName(SCENE_MODES_MAP,
                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
                fwk_sceneMode);
        if (NAME_NOT_FOUND != val) {
            uint8_t sceneMode = (uint8_t)val;
            LOGD("sceneMode: %d", sceneMode);
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
                rc = BAD_VALUE;
            }
        }

        // HDR scene mode additionally turns on multi-frame HDR bracketing.
        if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
            cam_hdr_param_t hdr_params;
            hdr_params.hdr_enable = 1;
            hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
            hdr_params.hdr_need_1x = false;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
                rc = BAD_VALUE;
            }
        }
    } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
            (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
        // OFF/AUTO control modes explicitly disable any bestshot mode.
        uint8_t sceneMode = CAM_SCENE_MODE_OFF;
        LOGD("sceneMode: %d", sceneMode);
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
            rc = BAD_VALUE;
        }
    }

    // mForceHdrSnapshot forces HDR bracketing regardless of the requested
    // scene mode (presumably a debug/property override — confirm where it
    // is set).
    if (mForceHdrSnapshot) {
        cam_hdr_param_t hdr_params;
        hdr_params.hdr_enable = 1;
        hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
        hdr_params.hdr_need_1x = false;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
            rc = BAD_VALUE;
        }
    }

    return rc;
}
11266
11267/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011268 * FUNCTION : setVideoHdrMode
11269 *
11270 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11271 *
11272 * PARAMETERS :
11273 * @hal_metadata: hal metadata structure
11274 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11275 *
 * RETURN     : int32_t type of status, NO_ERROR on success
11277 *==========================================================================*/
int32_t QCamera3HardwareInterface::setVideoHdrMode(
        metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
{
    // Translate the QCAMERA3_VIDEO_HDR_MODE_* request into the sensor HDR
    // type the hardware supports, then batch it into hal_metadata.
    int32_t rc = NO_ERROR;
    if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
        LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
        rc = BAD_VALUE;
    } else {
        cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
        if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
            LOGD("Setting HDR mode Off");
            vhdr_type = CAM_SENSOR_HDR_OFF;
        } else {
            // The property selects the preferred HDR flavor; it is applied
            // only when the capability mask also advertises that flavor.
            char video_hdr_prop[PROPERTY_VALUE_MAX];
            memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
            property_get("persist.camera.hdr.video", video_hdr_prop, "3");
            uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
                LOGD("Setting HDR mode In Sensor");
                vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
                LOGD("Setting HDR mode Zigzag");
                vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
                LOGD("Setting HDR mode Staggered");
                vhdr_type = CAM_SENSOR_HDR_STAGGERED;
            }
            // No supported flavor matched the property: reject the request.
            if(vhdr_type == CAM_SENSOR_HDR_MAX) {
                LOGD("HDR mode not supported");
                rc = BAD_VALUE;
            }
        }
        if(rc == NO_ERROR) {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
                rc = BAD_VALUE;
            }
        }
    }
    return rc;
}
11327
11328/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011329 * FUNCTION : needRotationReprocess
11330 *
11331 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11332 *
11333 * PARAMETERS : none
11334 *
11335 * RETURN : true: needed
11336 * false: no need
11337 *==========================================================================*/
11338bool QCamera3HardwareInterface::needRotationReprocess()
11339{
11340 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11341 // current rotation is not zero, and pp has the capability to process rotation
11342 LOGH("need do reprocess for rotation");
11343 return true;
11344 }
11345
11346 return false;
11347}
11348
11349/*===========================================================================
11350 * FUNCTION : needReprocess
11351 *
11352 * DESCRIPTION: if reprocess in needed
11353 *
11354 * PARAMETERS : none
11355 *
11356 * RETURN : true: needed
11357 * false: no need
11358 *==========================================================================*/
11359bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11360{
11361 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11362 // TODO: add for ZSL HDR later
11363 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11364 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11365 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11366 return true;
11367 } else {
11368 LOGH("already post processed frame");
11369 return false;
11370 }
11371 }
11372 return needRotationReprocess();
11373}
11374
11375/*===========================================================================
11376 * FUNCTION : needJpegExifRotation
11377 *
11378 * DESCRIPTION: if rotation from jpeg is needed
11379 *
11380 * PARAMETERS : none
11381 *
11382 * RETURN : true: needed
11383 * false: no need
11384 *==========================================================================*/
11385bool QCamera3HardwareInterface::needJpegExifRotation()
11386{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011387 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011388 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11389 LOGD("Need use Jpeg EXIF Rotation");
11390 return true;
11391 }
11392 return false;
11393}
11394
11395/*===========================================================================
11396 * FUNCTION : addOfflineReprocChannel
11397 *
11398 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
11399 * coming from input channel
11400 *
11401 * PARAMETERS :
11402 * @config : reprocess configuration
11403 * @inputChHandle : pointer to the input (source) channel
11404 *
11405 *
11406 * RETURN : Ptr to the newly created channel obj. NULL if failed.
11407 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    // Create and initialize an offline reprocess channel fed by
    // inputChHandle, then attach reprocess streams configured from the
    // capability mask and the caller's config.  Returns NULL on any
    // failure (channel is cleaned up here).
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    // NOTE(review): this NULL check only fires with a non-throwing `new`
    // (e.g. -fno-exceptions builds); with a throwing `new` it is dead code.
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from the HAL3 superset, then adjust per
    // hardware capability.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the PP block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Propagate caller-requested HDR bracketing parameters.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // mForceHdrSnapshot overrides with multi-frame HDR regardless of config.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
11467
11468/*===========================================================================
11469 * FUNCTION : getMobicatMask
11470 *
11471 * DESCRIPTION: returns mobicat mask
11472 *
11473 * PARAMETERS : none
11474 *
11475 * RETURN : mobicat mask
11476 *
11477 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Cached value last computed by setMobicat() from persist.camera.mobicat.
    return m_MobicatMask;
}
11482
11483/*===========================================================================
11484 * FUNCTION : setMobicat
11485 *
11486 * DESCRIPTION: set Mobicat on/off.
11487 *
11488 * PARAMETERS :
11489 * @params : none
11490 *
11491 * RETURN : int32_t type of status
11492 * NO_ERROR -- success
11493 * none-zero failure code
11494 *==========================================================================*/
11495int32_t QCamera3HardwareInterface::setMobicat()
11496{
11497 char value [PROPERTY_VALUE_MAX];
11498 property_get("persist.camera.mobicat", value, "0");
11499 int32_t ret = NO_ERROR;
11500 uint8_t enableMobi = (uint8_t)atoi(value);
11501
11502 if (enableMobi) {
11503 tune_cmd_t tune_cmd;
11504 tune_cmd.type = SET_RELOAD_CHROMATIX;
11505 tune_cmd.module = MODULE_ALL;
11506 tune_cmd.value = TRUE;
11507 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11508 CAM_INTF_PARM_SET_VFE_COMMAND,
11509 tune_cmd);
11510
11511 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11512 CAM_INTF_PARM_SET_PP_COMMAND,
11513 tune_cmd);
11514 }
11515 m_MobicatMask = enableMobi;
11516
11517 return ret;
11518}
11519
11520/*===========================================================================
11521* FUNCTION : getLogLevel
11522*
11523* DESCRIPTION: Reads the log level property into a variable
11524*
11525* PARAMETERS :
11526* None
11527*
11528* RETURN :
11529* None
11530*==========================================================================*/
11531void QCamera3HardwareInterface::getLogLevel()
11532{
11533 char prop[PROPERTY_VALUE_MAX];
11534 uint32_t globalLogLevel = 0;
11535
11536 property_get("persist.camera.hal.debug", prop, "0");
11537 int val = atoi(prop);
11538 if (0 <= val) {
11539 gCamHal3LogLevel = (uint32_t)val;
11540 }
11541
11542 property_get("persist.camera.kpi.debug", prop, "1");
11543 gKpiDebugLevel = atoi(prop);
11544
11545 property_get("persist.camera.global.debug", prop, "0");
11546 val = atoi(prop);
11547 if (0 <= val) {
11548 globalLogLevel = (uint32_t)val;
11549 }
11550
11551 /* Highest log level among hal.logs and global.logs is selected */
11552 if (gCamHal3LogLevel < globalLogLevel)
11553 gCamHal3LogLevel = globalLogLevel;
11554
11555 return;
11556}
11557
11558/*===========================================================================
11559 * FUNCTION : validateStreamRotations
11560 *
11561 * DESCRIPTION: Check if the rotations requested are supported
11562 *
11563 * PARAMETERS :
11564 * @stream_list : streams to be configured
11565 *
11566 * RETURN : NO_ERROR on success
11567 * -EINVAL on failure
11568 *
11569 *==========================================================================*/
11570int QCamera3HardwareInterface::validateStreamRotations(
11571 camera3_stream_configuration_t *streamList)
11572{
11573 int rc = NO_ERROR;
11574
11575 /*
11576 * Loop through all streams requested in configuration
11577 * Check if unsupported rotations have been requested on any of them
11578 */
11579 for (size_t j = 0; j < streamList->num_streams; j++){
11580 camera3_stream_t *newStream = streamList->streams[j];
11581
11582 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11583 bool isImplDef = (newStream->format ==
11584 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11585 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11586 isImplDef);
11587
11588 if (isRotated && (!isImplDef || isZsl)) {
11589 LOGE("Error: Unsupported rotation of %d requested for stream"
11590 "type:%d and stream format:%d",
11591 newStream->rotation, newStream->stream_type,
11592 newStream->format);
11593 rc = -EINVAL;
11594 break;
11595 }
11596 }
11597
11598 return rc;
11599}
11600
11601/*===========================================================================
11602* FUNCTION : getFlashInfo
11603*
11604* DESCRIPTION: Retrieve information about whether the device has a flash.
11605*
11606* PARAMETERS :
11607* @cameraId : Camera id to query
11608* @hasFlash : Boolean indicating whether there is a flash device
11609* associated with given camera
11610* @flashNode : If a flash device exists, this will be its device node.
11611*
11612* RETURN :
11613* None
11614*==========================================================================*/
11615void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11616 bool& hasFlash,
11617 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11618{
11619 cam_capability_t* camCapability = gCamCapability[cameraId];
11620 if (NULL == camCapability) {
11621 hasFlash = false;
11622 flashNode[0] = '\0';
11623 } else {
11624 hasFlash = camCapability->flash_available;
11625 strlcpy(flashNode,
11626 (char*)camCapability->flash_dev_name,
11627 QCAMERA_MAX_FILEPATH_LENGTH);
11628 }
11629}
11630
11631/*===========================================================================
11632* FUNCTION : getEepromVersionInfo
11633*
11634* DESCRIPTION: Retrieve version info of the sensor EEPROM data
11635*
11636* PARAMETERS : None
11637*
11638* RETURN : string describing EEPROM version
11639* "\0" if no such info available
11640*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Version string is stored at the start of the capability EEPROM blob;
    // it is "\0" (empty string) when no EEPROM info is available.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
11645
11646/*===========================================================================
11647* FUNCTION : getLdafCalib
11648*
11649* DESCRIPTION: Retrieve Laser AF calibration data
11650*
11651* PARAMETERS : None
11652*
11653* RETURN : Two uint32_t describing laser AF calibration data
11654* NULL if none is available.
11655*==========================================================================*/
11656const uint32_t *QCamera3HardwareInterface::getLdafCalib()
11657{
11658 if (mLdafCalibExist) {
11659 return &mLdafCalib[0];
11660 } else {
11661 return NULL;
11662 }
11663}
11664
11665/*===========================================================================
11666 * FUNCTION : dynamicUpdateMetaStreamInfo
11667 *
11668 * DESCRIPTION: This function:
11669 * (1) stops all the channels
11670 * (2) returns error on pending requests and buffers
11671 * (3) sends metastream_info in setparams
11672 * (4) starts all channels
11673 * This is useful when sensor has to be restarted to apply any
11674 * settings such as frame rate from a different sensor mode
11675 *
11676 * PARAMETERS : None
11677 *
11678 * RETURN : NO_ERROR on success
11679 * Error codes on failure
11680 *
11681 *==========================================================================*/
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    // Restart the sensor pipeline so settings that need a sensor-mode
    // change (e.g. frame rate) take effect: stop channels, fail pending
    // requests back to the framework, resend meta stream info, restart.
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
    int rc = NO_ERROR;

    LOGD("E");

    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Pending requests cannot survive the restart; error them out now.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        // NOTE(review): pp_mask is printed with %x — confirm
        // cam_feature_mask_t is 32-bit, otherwise this truncates.
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    // Deliberately non-fatal: channels are restarted even if set_parms
    // failed, leaving the previous sensor mode in effect.
    if (rc < 0) {
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
11729
11730/*===========================================================================
11731 * FUNCTION : stopAllChannels
11732 *
11733 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
11734 *
11735 * PARAMETERS : None
11736 *
11737 * RETURN : NO_ERROR on success
11738 * Error codes on failure
11739 *
11740 *==========================================================================*/
11741int32_t QCamera3HardwareInterface::stopAllChannels()
11742{
11743 int32_t rc = NO_ERROR;
11744
11745 LOGD("Stopping all channels");
11746 // Stop the Streams/Channels
11747 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11748 it != mStreamInfo.end(); it++) {
11749 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11750 if (channel) {
11751 channel->stop();
11752 }
11753 (*it)->status = INVALID;
11754 }
11755
11756 if (mSupportChannel) {
11757 mSupportChannel->stop();
11758 }
11759 if (mAnalysisChannel) {
11760 mAnalysisChannel->stop();
11761 }
11762 if (mRawDumpChannel) {
11763 mRawDumpChannel->stop();
11764 }
11765 if (mMetadataChannel) {
11766 /* If content of mStreamInfo is not 0, there is metadata stream */
11767 mMetadataChannel->stop();
11768 }
11769
11770 LOGD("All channels stopped");
11771 return rc;
11772}
11773
11774/*===========================================================================
11775 * FUNCTION : startAllChannels
11776 *
11777 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
11778 *
11779 * PARAMETERS : None
11780 *
11781 * RETURN : NO_ERROR on success
11782 * Error codes on failure
11783 *
11784 *==========================================================================*/
11785int32_t QCamera3HardwareInterface::startAllChannels()
11786{
11787 int32_t rc = NO_ERROR;
11788
11789 LOGD("Start all channels ");
11790 // Start the Streams/Channels
11791 if (mMetadataChannel) {
11792 /* If content of mStreamInfo is not 0, there is metadata stream */
11793 rc = mMetadataChannel->start();
11794 if (rc < 0) {
11795 LOGE("META channel start failed");
11796 return rc;
11797 }
11798 }
11799 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
11800 it != mStreamInfo.end(); it++) {
11801 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
11802 if (channel) {
11803 rc = channel->start();
11804 if (rc < 0) {
11805 LOGE("channel start failed");
11806 return rc;
11807 }
11808 }
11809 }
11810 if (mAnalysisChannel) {
11811 mAnalysisChannel->start();
11812 }
11813 if (mSupportChannel) {
11814 rc = mSupportChannel->start();
11815 if (rc < 0) {
11816 LOGE("Support channel start failed");
11817 return rc;
11818 }
11819 }
11820 if (mRawDumpChannel) {
11821 rc = mRawDumpChannel->start();
11822 if (rc < 0) {
11823 LOGE("RAW dump channel start failed");
11824 return rc;
11825 }
11826 }
11827
11828 LOGD("All channels started");
11829 return rc;
11830}
11831
11832/*===========================================================================
11833 * FUNCTION : notifyErrorForPendingRequests
11834 *
11835 * DESCRIPTION: This function sends error for all the pending requests/buffers
11836 *
11837 * PARAMETERS : None
11838 *
11839 * RETURN : Error codes
11840 * NO_ERROR on success
11841 *
11842 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    // Fail every outstanding request/buffer back to the framework:
    //  - requests older than the oldest entry on mPendingRequestsList get
    //    per-buffer CAMERA3_MSG_ERROR_BUFFER notifies (their metadata was
    //    already delivered);
    //  - everything else gets a CAMERA3_MSG_ERROR_REQUEST notify.
    // All pending bookkeeping lists are cleared before returning.
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
      frameNum);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): NULL check after new[] only fires with a
            // non-throwing new (e.g. -fno-exceptions builds).
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // Emit one ERROR_BUFFER notify per pending buffer and collect
            // the buffers (status ERROR) into a single capture result.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
            // NOTE(review): this branch assumes mPendingRequestsList is
            // non-empty (frameNum would be UINT_MAX otherwise) — i is
            // dereferenced below for input_buffer.

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            // Return every pending buffer of the request with ERROR status.
            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
11980
11981bool QCamera3HardwareInterface::isOnEncoder(
11982 const cam_dimension_t max_viewfinder_size,
11983 uint32_t width, uint32_t height)
11984{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011985 return ((width > (uint32_t)max_viewfinder_size.width) ||
11986 (height > (uint32_t)max_viewfinder_size.height) ||
11987 (width > (uint32_t)VIDEO_4K_WIDTH) ||
11988 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070011989}
11990
11991/*===========================================================================
11992 * FUNCTION : setBundleInfo
11993 *
11994 * DESCRIPTION: Set bundle info for all streams that are bundle.
11995 *
11996 * PARAMETERS : None
11997 *
11998 * RETURN : NO_ERROR on success
11999 * Error codes on failure
12000 *==========================================================================*/
12001int32_t QCamera3HardwareInterface::setBundleInfo()
12002{
12003 int32_t rc = NO_ERROR;
12004
12005 if (mChannelHandle) {
12006 cam_bundle_config_t bundleInfo;
12007 memset(&bundleInfo, 0, sizeof(bundleInfo));
12008 rc = mCameraHandle->ops->get_bundle_info(
12009 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12010 if (rc != NO_ERROR) {
12011 LOGE("get_bundle_info failed");
12012 return rc;
12013 }
12014 if (mAnalysisChannel) {
12015 mAnalysisChannel->setBundleInfo(bundleInfo);
12016 }
12017 if (mSupportChannel) {
12018 mSupportChannel->setBundleInfo(bundleInfo);
12019 }
12020 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12021 it != mStreamInfo.end(); it++) {
12022 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12023 channel->setBundleInfo(bundleInfo);
12024 }
12025 if (mRawDumpChannel) {
12026 mRawDumpChannel->setBundleInfo(bundleInfo);
12027 }
12028 }
12029
12030 return rc;
12031}
12032
12033/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012034 * FUNCTION : setInstantAEC
12035 *
12036 * DESCRIPTION: Set Instant AEC related params.
12037 *
12038 * PARAMETERS :
12039 * @meta: CameraMetadata reference
12040 *
12041 * RETURN : NO_ERROR on success
12042 * Error codes on failure
12043 *==========================================================================*/
12044int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12045{
12046 int32_t rc = NO_ERROR;
12047 uint8_t val = 0;
12048 char prop[PROPERTY_VALUE_MAX];
12049
12050 // First try to configure instant AEC from framework metadata
12051 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12052 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12053 }
12054
12055 // If framework did not set this value, try to read from set prop.
12056 if (val == 0) {
12057 memset(prop, 0, sizeof(prop));
12058 property_get("persist.camera.instant.aec", prop, "0");
12059 val = (uint8_t)atoi(prop);
12060 }
12061
12062 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12063 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12064 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12065 mInstantAEC = val;
12066 mInstantAECSettledFrameNumber = 0;
12067 mInstantAecFrameIdxCount = 0;
12068 LOGH("instantAEC value set %d",val);
12069 if (mInstantAEC) {
12070 memset(prop, 0, sizeof(prop));
12071 property_get("persist.camera.ae.instant.bound", prop, "10");
12072 int32_t aec_frame_skip_cnt = atoi(prop);
12073 if (aec_frame_skip_cnt >= 0) {
12074 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12075 } else {
12076 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12077 rc = BAD_VALUE;
12078 }
12079 }
12080 } else {
12081 LOGE("Bad instant aec value set %d", val);
12082 rc = BAD_VALUE;
12083 }
12084 return rc;
12085}
12086
12087/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012088 * FUNCTION : get_num_overall_buffers
12089 *
12090 * DESCRIPTION: Estimate number of pending buffers across all requests.
12091 *
12092 * PARAMETERS : None
12093 *
12094 * RETURN : Number of overall pending buffers
12095 *
12096 *==========================================================================*/
12097uint32_t PendingBuffersMap::get_num_overall_buffers()
12098{
12099 uint32_t sum_buffers = 0;
12100 for (auto &req : mPendingBuffersInRequest) {
12101 sum_buffers += req.mPendingBufferList.size();
12102 }
12103 return sum_buffers;
12104}
12105
12106/*===========================================================================
12107 * FUNCTION : removeBuf
12108 *
12109 * DESCRIPTION: Remove a matching buffer from tracker.
12110 *
12111 * PARAMETERS : @buffer: image buffer for the callback
12112 *
12113 * RETURN : None
12114 *
12115 *==========================================================================*/
12116void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
12117{
12118 bool buffer_found = false;
12119 for (auto req = mPendingBuffersInRequest.begin();
12120 req != mPendingBuffersInRequest.end(); req++) {
12121 for (auto k = req->mPendingBufferList.begin();
12122 k != req->mPendingBufferList.end(); k++ ) {
12123 if (k->buffer == buffer) {
12124 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
12125 req->frame_number, buffer);
12126 k = req->mPendingBufferList.erase(k);
12127 if (req->mPendingBufferList.empty()) {
12128 // Remove this request from Map
12129 req = mPendingBuffersInRequest.erase(req);
12130 }
12131 buffer_found = true;
12132 break;
12133 }
12134 }
12135 if (buffer_found) {
12136 break;
12137 }
12138 }
12139 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
12140 get_num_overall_buffers());
12141}
12142
12143/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012144 * FUNCTION : getBufErrStatus
12145 *
12146 * DESCRIPTION: get buffer error status
12147 *
12148 * PARAMETERS : @buffer: buffer handle
12149 *
12150 * RETURN : Error status
12151 *
12152 *==========================================================================*/
12153int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12154{
12155 for (auto& req : mPendingBuffersInRequest) {
12156 for (auto& k : req.mPendingBufferList) {
12157 if (k.buffer == buffer)
12158 return k.bufStatus;
12159 }
12160 }
12161 return CAMERA3_BUFFER_STATUS_OK;
12162}
12163
12164/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012165 * FUNCTION : setPAAFSupport
12166 *
12167 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12168 * feature mask according to stream type and filter
12169 * arrangement
12170 *
12171 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12172 * @stream_type: stream type
12173 * @filter_arrangement: filter arrangement
12174 *
12175 * RETURN : None
12176 *==========================================================================*/
12177void QCamera3HardwareInterface::setPAAFSupport(
12178 cam_feature_mask_t& feature_mask,
12179 cam_stream_type_t stream_type,
12180 cam_color_filter_arrangement_t filter_arrangement)
12181{
12182 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12183 feature_mask, stream_type, filter_arrangement);
12184
12185 switch (filter_arrangement) {
12186 case CAM_FILTER_ARRANGEMENT_RGGB:
12187 case CAM_FILTER_ARRANGEMENT_GRBG:
12188 case CAM_FILTER_ARRANGEMENT_GBRG:
12189 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012190 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12191 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012192 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12193 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12194 }
12195 break;
12196 case CAM_FILTER_ARRANGEMENT_Y:
12197 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12198 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12199 }
12200 break;
12201 default:
12202 break;
12203 }
12204}
12205
12206/*===========================================================================
12207* FUNCTION : getSensorMountAngle
12208*
12209* DESCRIPTION: Retrieve sensor mount angle
12210*
12211* PARAMETERS : None
12212*
12213* RETURN : sensor mount angle in uint32_t
12214*==========================================================================*/
12215uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12216{
12217 return gCamCapability[mCameraId]->sensor_mount_angle;
12218}
12219
12220/*===========================================================================
12221* FUNCTION : getRelatedCalibrationData
12222*
12223* DESCRIPTION: Retrieve related system calibration data
12224*
12225* PARAMETERS : None
12226*
12227* RETURN : Pointer of related system calibration data
12228*==========================================================================*/
12229const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
12230{
12231 return (const cam_related_system_calibration_data_t *)
12232 &(gCamCapability[mCameraId]->related_cam_calibration);
12233}
12234}; //end namespace qcamera