blob: 175726d89b0eefdc74525cc2d6276232d2bc870d [file] [log] [blame]
/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
60extern "C" {
61#include "mm_camera_dbg.h"
62}
63
64using namespace android;
65
66namespace qcamera {
67
// Shorthand for fetching the buffer pointer at INDEX from a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline characteristics reported to / assumed by the framework.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

// Maximum representable pixel value per sensor bit depth.
#define MAX_VALUE_8BIT  ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions.
#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream dimensions for which EIS is supported.
#define MAX_EIS_WIDTH  1920
#define MAX_EIS_HEIGHT 1080

// Stream-count limits advertised by this HAL.
#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3

/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
// Number of int32 elements making up one region tuple in the metadata.
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds

// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT               3

// Element count of a statically sized mapping table.
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Superset of post-processing features applied to HAL3 processed streams.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6
120
Thierry Strudel3d639192016-09-09 11:52:26 -0700121cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
122const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
123extern pthread_mutex_t gCamLock;
124volatile uint32_t gCamHal3LogLevel = 1;
125extern uint8_t gNumCameraSessions;
126
127const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
128 {"On", CAM_CDS_MODE_ON},
129 {"Off", CAM_CDS_MODE_OFF},
130 {"Auto",CAM_CDS_MODE_AUTO}
131};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700132const QCamera3HardwareInterface::QCameraMap<
133 camera_metadata_enum_android_video_hdr_mode_t,
134 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
135 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
136 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
137};
138
139
140const QCamera3HardwareInterface::QCameraMap<
141 camera_metadata_enum_android_ir_mode_t,
142 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
143 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
144 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
145 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
146};
Thierry Strudel3d639192016-09-09 11:52:26 -0700147
148const QCamera3HardwareInterface::QCameraMap<
149 camera_metadata_enum_android_control_effect_mode_t,
150 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
151 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
152 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
153 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
154 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
155 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
156 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
157 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
158 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
159 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
160};
161
162const QCamera3HardwareInterface::QCameraMap<
163 camera_metadata_enum_android_control_awb_mode_t,
164 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
165 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
166 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
167 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
168 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
169 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
170 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
171 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
172 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
173 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
174};
175
176const QCamera3HardwareInterface::QCameraMap<
177 camera_metadata_enum_android_control_scene_mode_t,
178 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
179 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
180 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
181 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
182 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
183 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
184 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
185 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
186 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
187 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
188 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
189 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
190 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
191 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
192 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
193 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
194 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE}
195};
196
197const QCamera3HardwareInterface::QCameraMap<
198 camera_metadata_enum_android_control_af_mode_t,
199 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
200 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
201 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
202 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
203 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
204 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
205 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
206 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
207};
208
209const QCamera3HardwareInterface::QCameraMap<
210 camera_metadata_enum_android_color_correction_aberration_mode_t,
211 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
212 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
213 CAM_COLOR_CORRECTION_ABERRATION_OFF },
214 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
215 CAM_COLOR_CORRECTION_ABERRATION_FAST },
216 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
217 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
218};
219
220const QCamera3HardwareInterface::QCameraMap<
221 camera_metadata_enum_android_control_ae_antibanding_mode_t,
222 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
223 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
224 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
225 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
226 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
227};
228
229const QCamera3HardwareInterface::QCameraMap<
230 camera_metadata_enum_android_control_ae_mode_t,
231 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
232 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
233 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
234 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
235 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
236 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
237};
238
239const QCamera3HardwareInterface::QCameraMap<
240 camera_metadata_enum_android_flash_mode_t,
241 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
242 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
243 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
244 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
245};
246
247const QCamera3HardwareInterface::QCameraMap<
248 camera_metadata_enum_android_statistics_face_detect_mode_t,
249 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
250 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
251 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
252 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
253};
254
255const QCamera3HardwareInterface::QCameraMap<
256 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
257 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
258 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
259 CAM_FOCUS_UNCALIBRATED },
260 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
261 CAM_FOCUS_APPROXIMATE },
262 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
263 CAM_FOCUS_CALIBRATED }
264};
265
266const QCamera3HardwareInterface::QCameraMap<
267 camera_metadata_enum_android_lens_state_t,
268 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
269 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
270 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
271};
272
273const int32_t available_thumbnail_sizes[] = {0, 0,
274 176, 144,
275 240, 144,
276 256, 144,
277 240, 160,
278 256, 154,
279 240, 240,
280 320, 240};
281
282const QCamera3HardwareInterface::QCameraMap<
283 camera_metadata_enum_android_sensor_test_pattern_mode_t,
284 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
285 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
286 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
287 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
288 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
289 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
290 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
291};
292
293/* Since there is no mapping for all the options some Android enum are not listed.
294 * Also, the order in this list is important because while mapping from HAL to Android it will
295 * traverse from lower to higher index which means that for HAL values that are map to different
296 * Android values, the traverse logic will select the first one found.
297 */
298const QCamera3HardwareInterface::QCameraMap<
299 camera_metadata_enum_android_sensor_reference_illuminant1_t,
300 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
301 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
302 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
303 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
304 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
305 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
306 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
307 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
308 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
309 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
310 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
311 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
312 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
313 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
314 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
315 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
316 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
317};
318
319const QCamera3HardwareInterface::QCameraMap<
320 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
321 { 60, CAM_HFR_MODE_60FPS},
322 { 90, CAM_HFR_MODE_90FPS},
323 { 120, CAM_HFR_MODE_120FPS},
324 { 150, CAM_HFR_MODE_150FPS},
325 { 180, CAM_HFR_MODE_180FPS},
326 { 210, CAM_HFR_MODE_210FPS},
327 { 240, CAM_HFR_MODE_240FPS},
328 { 480, CAM_HFR_MODE_480FPS},
329};
330
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700331const QCamera3HardwareInterface::QCameraMap<
332 qcamera3_ext_instant_aec_mode_t,
333 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
334 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
335 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
336 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
337};
Thierry Strudel3d639192016-09-09 11:52:26 -0700338camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
339 .initialize = QCamera3HardwareInterface::initialize,
340 .configure_streams = QCamera3HardwareInterface::configure_streams,
341 .register_stream_buffers = NULL,
342 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
343 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
344 .get_metadata_vendor_tag_ops = NULL,
345 .dump = QCamera3HardwareInterface::dump,
346 .flush = QCamera3HardwareInterface::flush,
347 .reserved = {0},
348};
349
350// initialise to some default value
351uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
352
353/*===========================================================================
354 * FUNCTION : QCamera3HardwareInterface
355 *
356 * DESCRIPTION: constructor of QCamera3HardwareInterface
357 *
358 * PARAMETERS :
359 * @cameraId : camera ID
360 *
361 * RETURN : none
362 *==========================================================================*/
363QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
364 const camera_module_callbacks_t *callbacks)
365 : mCameraId(cameraId),
366 mCameraHandle(NULL),
367 mCameraInitialized(false),
368 mCallbackOps(NULL),
369 mMetadataChannel(NULL),
370 mPictureChannel(NULL),
371 mRawChannel(NULL),
372 mSupportChannel(NULL),
373 mAnalysisChannel(NULL),
374 mRawDumpChannel(NULL),
375 mDummyBatchChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800376 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700377 mCommon(),
378 mChannelHandle(0),
379 mFirstConfiguration(true),
380 mFlush(false),
381 mFlushPerf(false),
382 mParamHeap(NULL),
383 mParameters(NULL),
384 mPrevParameters(NULL),
385 m_bIsVideo(false),
386 m_bIs4KVideo(false),
387 m_bEisSupportedSize(false),
388 m_bEisEnable(false),
389 m_MobicatMask(0),
390 mMinProcessedFrameDuration(0),
391 mMinJpegFrameDuration(0),
392 mMinRawFrameDuration(0),
393 mMetaFrameCount(0U),
394 mUpdateDebugLevel(false),
395 mCallbacks(callbacks),
396 mCaptureIntent(0),
397 mCacMode(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800398 /* DevCamDebug metadata internal m control*/
399 mDevCamDebugMetaEnable(0),
400 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700401 mBatchSize(0),
402 mToBeQueuedVidBufs(0),
403 mHFRVideoFps(DEFAULT_VIDEO_FPS),
404 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
405 mFirstFrameNumberInBatch(0),
406 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800407 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700408 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
409 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700410 mInstantAEC(false),
411 mResetInstantAEC(false),
412 mInstantAECSettledFrameNumber(0),
413 mAecSkipDisplayFrameBound(0),
414 mInstantAecFrameIdxCount(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700415 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700416 mLastCustIntentFrmNum(-1),
417 mState(CLOSED),
418 mIsDeviceLinked(false),
419 mIsMainCamera(true),
420 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700421 m_pDualCamCmdHeap(NULL),
422 m_pDualCamCmdPtr(NULL)
Thierry Strudel3d639192016-09-09 11:52:26 -0700423{
424 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700425 mCommon.init(gCamCapability[cameraId]);
426 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700427#ifndef USE_HAL_3_3
428 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
429#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700430 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700431#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700432 mCameraDevice.common.close = close_camera_device;
433 mCameraDevice.ops = &mCameraOps;
434 mCameraDevice.priv = this;
435 gCamCapability[cameraId]->version = CAM_HAL_V3;
436 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
437 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
438 gCamCapability[cameraId]->min_num_pp_bufs = 3;
439
440 pthread_cond_init(&mBuffersCond, NULL);
441
442 pthread_cond_init(&mRequestCond, NULL);
443 mPendingLiveRequest = 0;
444 mCurrentRequestId = -1;
445 pthread_mutex_init(&mMutex, NULL);
446
447 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
448 mDefaultMetadata[i] = NULL;
449
450 // Getting system props of different kinds
451 char prop[PROPERTY_VALUE_MAX];
452 memset(prop, 0, sizeof(prop));
453 property_get("persist.camera.raw.dump", prop, "0");
454 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800455 property_get("persist.camera.hal3.force.hdr", prop, "0");
456 mForceHdrSnapshot = atoi(prop);
457
Thierry Strudel3d639192016-09-09 11:52:26 -0700458 if (mEnableRawDump)
459 LOGD("Raw dump from Camera HAL enabled");
460
461 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
462 memset(mLdafCalib, 0, sizeof(mLdafCalib));
463
464 memset(prop, 0, sizeof(prop));
465 property_get("persist.camera.tnr.preview", prop, "0");
466 m_bTnrPreview = (uint8_t)atoi(prop);
467
468 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800469 property_get("persist.camera.swtnr.preview", prop, "1");
470 m_bSwTnrPreview = (uint8_t)atoi(prop);
471
472 memset(prop, 0, sizeof(prop));
Thierry Strudel3d639192016-09-09 11:52:26 -0700473 property_get("persist.camera.tnr.video", prop, "0");
474 m_bTnrVideo = (uint8_t)atoi(prop);
475
476 memset(prop, 0, sizeof(prop));
477 property_get("persist.camera.avtimer.debug", prop, "0");
478 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800479 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700480
481 //Load and read GPU library.
482 lib_surface_utils = NULL;
483 LINK_get_surface_pixel_alignment = NULL;
484 mSurfaceStridePadding = CAM_PAD_TO_32;
485 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
486 if (lib_surface_utils) {
487 *(void **)&LINK_get_surface_pixel_alignment =
488 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
489 if (LINK_get_surface_pixel_alignment) {
490 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
491 }
492 dlclose(lib_surface_utils);
493 }
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700494
495 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700496}
497
498/*===========================================================================
499 * FUNCTION : ~QCamera3HardwareInterface
500 *
501 * DESCRIPTION: destructor of QCamera3HardwareInterface
502 *
503 * PARAMETERS : none
504 *
505 * RETURN : none
506 *==========================================================================*/
507QCamera3HardwareInterface::~QCamera3HardwareInterface()
508{
509 LOGD("E");
510
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800511 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700512
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800513 // Disable power hint and enable the perf lock for close camera
514 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
515 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
516
517 // unlink of dualcam during close camera
518 if (mIsDeviceLinked) {
519 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
520 &m_pDualCamCmdPtr->bundle_info;
521 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
522 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
523 pthread_mutex_lock(&gCamLock);
524
525 if (mIsMainCamera == 1) {
526 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
527 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
528 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
529 // related session id should be session id of linked session
530 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
531 } else {
532 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
533 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
534 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
535 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
536 }
537 pthread_mutex_unlock(&gCamLock);
538
539 rc = mCameraHandle->ops->set_dual_cam_cmd(
540 mCameraHandle->camera_handle);
541 if (rc < 0) {
542 LOGE("Dualcam: Unlink failed, but still proceed to close");
543 }
544 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700545
546 /* We need to stop all streams before deleting any stream */
547 if (mRawDumpChannel) {
548 mRawDumpChannel->stop();
549 }
550
551 // NOTE: 'camera3_stream_t *' objects are already freed at
552 // this stage by the framework
553 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
554 it != mStreamInfo.end(); it++) {
555 QCamera3ProcessingChannel *channel = (*it)->channel;
556 if (channel) {
557 channel->stop();
558 }
559 }
560 if (mSupportChannel)
561 mSupportChannel->stop();
562
563 if (mAnalysisChannel) {
564 mAnalysisChannel->stop();
565 }
566 if (mMetadataChannel) {
567 mMetadataChannel->stop();
568 }
569 if (mChannelHandle) {
570 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
571 mChannelHandle);
572 LOGD("stopping channel %d", mChannelHandle);
573 }
574
575 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
576 it != mStreamInfo.end(); it++) {
577 QCamera3ProcessingChannel *channel = (*it)->channel;
578 if (channel)
579 delete channel;
580 free (*it);
581 }
582 if (mSupportChannel) {
583 delete mSupportChannel;
584 mSupportChannel = NULL;
585 }
586
587 if (mAnalysisChannel) {
588 delete mAnalysisChannel;
589 mAnalysisChannel = NULL;
590 }
591 if (mRawDumpChannel) {
592 delete mRawDumpChannel;
593 mRawDumpChannel = NULL;
594 }
595 if (mDummyBatchChannel) {
596 delete mDummyBatchChannel;
597 mDummyBatchChannel = NULL;
598 }
599
600 mPictureChannel = NULL;
601
602 if (mMetadataChannel) {
603 delete mMetadataChannel;
604 mMetadataChannel = NULL;
605 }
606
607 /* Clean up all channels */
608 if (mCameraInitialized) {
609 if(!mFirstConfiguration){
610 //send the last unconfigure
611 cam_stream_size_info_t stream_config_info;
612 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
613 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
614 stream_config_info.buffer_info.max_buffers =
615 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700616 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700617 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
618 stream_config_info);
619 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
620 if (rc < 0) {
621 LOGE("set_parms failed for unconfigure");
622 }
623 }
624 deinitParameters();
625 }
626
627 if (mChannelHandle) {
628 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
629 mChannelHandle);
630 LOGH("deleting channel %d", mChannelHandle);
631 mChannelHandle = 0;
632 }
633
634 if (mState != CLOSED)
635 closeCamera();
636
637 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
638 req.mPendingBufferList.clear();
639 }
640 mPendingBuffersMap.mPendingBuffersInRequest.clear();
641 mPendingReprocessResultList.clear();
642 for (pendingRequestIterator i = mPendingRequestsList.begin();
643 i != mPendingRequestsList.end();) {
644 i = erasePendingRequest(i);
645 }
646 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
647 if (mDefaultMetadata[i])
648 free_camera_metadata(mDefaultMetadata[i]);
649
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800650 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700651
652 pthread_cond_destroy(&mRequestCond);
653
654 pthread_cond_destroy(&mBuffersCond);
655
656 pthread_mutex_destroy(&mMutex);
657 LOGD("X");
658}
659
660/*===========================================================================
661 * FUNCTION : erasePendingRequest
662 *
663 * DESCRIPTION: function to erase a desired pending request after freeing any
664 * allocated memory
665 *
666 * PARAMETERS :
667 * @i : iterator pointing to pending request to be erased
668 *
669 * RETURN : iterator pointing to the next request
670 *==========================================================================*/
671QCamera3HardwareInterface::pendingRequestIterator
672 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
673{
674 if (i->input_buffer != NULL) {
675 free(i->input_buffer);
676 i->input_buffer = NULL;
677 }
678 if (i->settings != NULL)
679 free_camera_metadata((camera_metadata_t*)i->settings);
680 return mPendingRequestsList.erase(i);
681}
682
683/*===========================================================================
684 * FUNCTION : camEvtHandle
685 *
686 * DESCRIPTION: Function registered to mm-camera-interface to handle events
687 *
688 * PARAMETERS :
689 * @camera_handle : interface layer camera handle
690 * @evt : ptr to event
691 * @user_data : user data ptr
692 *
693 * RETURN : none
694 *==========================================================================*/
695void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
696 mm_camera_event_t *evt,
697 void *user_data)
698{
699 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
700 if (obj && evt) {
701 switch(evt->server_event_type) {
702 case CAM_EVENT_TYPE_DAEMON_DIED:
703 pthread_mutex_lock(&obj->mMutex);
704 obj->mState = ERROR;
705 pthread_mutex_unlock(&obj->mMutex);
706 LOGE("Fatal, camera daemon died");
707 break;
708
709 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
710 LOGD("HAL got request pull from Daemon");
711 pthread_mutex_lock(&obj->mMutex);
712 obj->mWokenUpByDaemon = true;
713 obj->unblockRequestIfNecessary();
714 pthread_mutex_unlock(&obj->mMutex);
715 break;
716
717 default:
718 LOGW("Warning: Unhandled event %d",
719 evt->server_event_type);
720 break;
721 }
722 } else {
723 LOGE("NULL user_data/evt");
724 }
725}
726
727/*===========================================================================
728 * FUNCTION : openCamera
729 *
730 * DESCRIPTION: open camera
731 *
732 * PARAMETERS :
733 * @hw_device : double ptr for camera device struct
734 *
735 * RETURN : int32_t type of status
736 * NO_ERROR -- success
737 * none-zero failure code
738 *==========================================================================*/
739int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
740{
741 int rc = 0;
742 if (mState != CLOSED) {
743 *hw_device = NULL;
744 return PERMISSION_DENIED;
745 }
746
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800747 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700748 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
749 mCameraId);
750
751 rc = openCamera();
752 if (rc == 0) {
753 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800754 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700755 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800756 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700757
Thierry Strudel3d639192016-09-09 11:52:26 -0700758 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
759 mCameraId, rc);
760
761 if (rc == NO_ERROR) {
762 mState = OPENED;
763 }
764 return rc;
765}
766
767/*===========================================================================
768 * FUNCTION : openCamera
769 *
770 * DESCRIPTION: open camera
771 *
772 * PARAMETERS : none
773 *
774 * RETURN : int32_t type of status
775 * NO_ERROR -- success
776 * none-zero failure code
777 *==========================================================================*/
778int QCamera3HardwareInterface::openCamera()
779{
780 int rc = 0;
781 char value[PROPERTY_VALUE_MAX];
782
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800783 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700784 if (mCameraHandle) {
785 LOGE("Failure: Camera already opened");
786 return ALREADY_EXISTS;
787 }
788
789 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
790 if (rc < 0) {
791 LOGE("Failed to reserve flash for camera id: %d",
792 mCameraId);
793 return UNKNOWN_ERROR;
794 }
795
796 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
797 if (rc) {
798 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
799 return rc;
800 }
801
802 if (!mCameraHandle) {
803 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
804 return -ENODEV;
805 }
806
807 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
808 camEvtHandle, (void *)this);
809
810 if (rc < 0) {
811 LOGE("Error, failed to register event callback");
812 /* Not closing camera here since it is already handled in destructor */
813 return FAILED_TRANSACTION;
814 }
815
816 mExifParams.debug_params =
817 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
818 if (mExifParams.debug_params) {
819 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
820 } else {
821 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
822 return NO_MEMORY;
823 }
824 mFirstConfiguration = true;
825
826 //Notify display HAL that a camera session is active.
827 //But avoid calling the same during bootup because camera service might open/close
828 //cameras at boot time during its initialization and display service will also internally
829 //wait for camera service to initialize first while calling this display API, resulting in a
830 //deadlock situation. Since boot time camera open/close calls are made only to fetch
831 //capabilities, no need of this display bw optimization.
832 //Use "service.bootanim.exit" property to know boot status.
833 property_get("service.bootanim.exit", value, "0");
834 if (atoi(value) == 1) {
835 pthread_mutex_lock(&gCamLock);
836 if (gNumCameraSessions++ == 0) {
837 setCameraLaunchStatus(true);
838 }
839 pthread_mutex_unlock(&gCamLock);
840 }
841
842 //fill the session id needed while linking dual cam
843 pthread_mutex_lock(&gCamLock);
844 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
845 &sessionId[mCameraId]);
846 pthread_mutex_unlock(&gCamLock);
847
848 if (rc < 0) {
849 LOGE("Error, failed to get sessiion id");
850 return UNKNOWN_ERROR;
851 } else {
852 //Allocate related cam sync buffer
853 //this is needed for the payload that goes along with bundling cmd for related
854 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700855 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
856 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700857 if(rc != OK) {
858 rc = NO_MEMORY;
859 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
860 return NO_MEMORY;
861 }
862
863 //Map memory for related cam sync buffer
864 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700865 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
866 m_pDualCamCmdHeap->getFd(0),
867 sizeof(cam_dual_camera_cmd_info_t),
868 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700869 if(rc < 0) {
870 LOGE("Dualcam: failed to map Related cam sync buffer");
871 rc = FAILED_TRANSACTION;
872 return NO_MEMORY;
873 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700874 m_pDualCamCmdPtr =
875 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700876 }
877
878 LOGH("mCameraId=%d",mCameraId);
879
880 return NO_ERROR;
881}
882
883/*===========================================================================
884 * FUNCTION : closeCamera
885 *
886 * DESCRIPTION: close camera
887 *
888 * PARAMETERS : none
889 *
890 * RETURN : int32_t type of status
891 * NO_ERROR -- success
892 * none-zero failure code
893 *==========================================================================*/
894int QCamera3HardwareInterface::closeCamera()
895{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800896 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700897 int rc = NO_ERROR;
898 char value[PROPERTY_VALUE_MAX];
899
900 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
901 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700902
903 // unmap memory for related cam sync buffer
904 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800905 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700906 if (NULL != m_pDualCamCmdHeap) {
907 m_pDualCamCmdHeap->deallocate();
908 delete m_pDualCamCmdHeap;
909 m_pDualCamCmdHeap = NULL;
910 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -0700911 }
912
Thierry Strudel3d639192016-09-09 11:52:26 -0700913 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
914 mCameraHandle = NULL;
915
916 //reset session id to some invalid id
917 pthread_mutex_lock(&gCamLock);
918 sessionId[mCameraId] = 0xDEADBEEF;
919 pthread_mutex_unlock(&gCamLock);
920
921 //Notify display HAL that there is no active camera session
922 //but avoid calling the same during bootup. Refer to openCamera
923 //for more details.
924 property_get("service.bootanim.exit", value, "0");
925 if (atoi(value) == 1) {
926 pthread_mutex_lock(&gCamLock);
927 if (--gNumCameraSessions == 0) {
928 setCameraLaunchStatus(false);
929 }
930 pthread_mutex_unlock(&gCamLock);
931 }
932
Thierry Strudel3d639192016-09-09 11:52:26 -0700933 if (mExifParams.debug_params) {
934 free(mExifParams.debug_params);
935 mExifParams.debug_params = NULL;
936 }
937 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
938 LOGW("Failed to release flash for camera id: %d",
939 mCameraId);
940 }
941 mState = CLOSED;
942 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
943 mCameraId, rc);
944 return rc;
945}
946
947/*===========================================================================
948 * FUNCTION : initialize
949 *
950 * DESCRIPTION: Initialize frameworks callback functions
951 *
952 * PARAMETERS :
953 * @callback_ops : callback function to frameworks
954 *
955 * RETURN :
956 *
957 *==========================================================================*/
958int QCamera3HardwareInterface::initialize(
959 const struct camera3_callback_ops *callback_ops)
960{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800961 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700962 int rc;
963
964 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
965 pthread_mutex_lock(&mMutex);
966
967 // Validate current state
968 switch (mState) {
969 case OPENED:
970 /* valid state */
971 break;
972 default:
973 LOGE("Invalid state %d", mState);
974 rc = -ENODEV;
975 goto err1;
976 }
977
978 rc = initParameters();
979 if (rc < 0) {
980 LOGE("initParamters failed %d", rc);
981 goto err1;
982 }
983 mCallbackOps = callback_ops;
984
985 mChannelHandle = mCameraHandle->ops->add_channel(
986 mCameraHandle->camera_handle, NULL, NULL, this);
987 if (mChannelHandle == 0) {
988 LOGE("add_channel failed");
989 rc = -ENOMEM;
990 pthread_mutex_unlock(&mMutex);
991 return rc;
992 }
993
994 pthread_mutex_unlock(&mMutex);
995 mCameraInitialized = true;
996 mState = INITIALIZED;
997 LOGI("X");
998 return 0;
999
1000err1:
1001 pthread_mutex_unlock(&mMutex);
1002 return rc;
1003}
1004
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    // More than one input stream is an invalid configuration; reject early.
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        // For 90/270 degree rotation the buffer is validated against the
        // swapped (pre-rotation) dimensions.
        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW streams must match one of the advertised raw dimensions.
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG streams are validated against the picture-size table.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            // ZSL/input streams are accepted at full active-array size;
            // the break below exits the switch, skipping the table scan.
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                                gCamCapability[mCameraId]->active_array_size.width) &&
                                ((int32_t)rotatedHeight ==
                                gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spc if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            // All other processed streams must match a picture-table entry.
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1127
1128/*==============================================================================
1129 * FUNCTION : isSupportChannelNeeded
1130 *
1131 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1132 *
1133 * PARAMETERS :
1134 * @stream_list : streams to be configured
1135 * @stream_config_info : the config info for streams to be configured
1136 *
1137 * RETURN : Boolen true/false decision
1138 *
1139 *==========================================================================*/
1140bool QCamera3HardwareInterface::isSupportChannelNeeded(
1141 camera3_stream_configuration_t *streamList,
1142 cam_stream_size_info_t stream_config_info)
1143{
1144 uint32_t i;
1145 bool pprocRequested = false;
1146 /* Check for conditions where PProc pipeline does not have any streams*/
1147 for (i = 0; i < stream_config_info.num_streams; i++) {
1148 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1149 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1150 pprocRequested = true;
1151 break;
1152 }
1153 }
1154
1155 if (pprocRequested == false )
1156 return true;
1157
1158 /* Dummy stream needed if only raw or jpeg streams present */
1159 for (i = 0; i < streamList->num_streams; i++) {
1160 switch(streamList->streams[i]->format) {
1161 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1162 case HAL_PIXEL_FORMAT_RAW10:
1163 case HAL_PIXEL_FORMAT_RAW16:
1164 case HAL_PIXEL_FORMAT_BLOB:
1165 break;
1166 default:
1167 return false;
1168 }
1169 }
1170 return true;
1171}
1172
1173/*==============================================================================
1174 * FUNCTION : getSensorOutputSize
1175 *
1176 * DESCRIPTION: Get sensor output size based on current stream configuratoin
1177 *
1178 * PARAMETERS :
1179 * @sensor_dim : sensor output dimension (output)
1180 *
1181 * RETURN : int32_t type of status
1182 * NO_ERROR -- success
1183 * none-zero failure code
1184 *
1185 *==========================================================================*/
1186int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1187{
1188 int32_t rc = NO_ERROR;
1189
1190 cam_dimension_t max_dim = {0, 0};
1191 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1192 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1193 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1194 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1195 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1196 }
1197
1198 clear_metadata_buffer(mParameters);
1199
1200 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1201 max_dim);
1202 if (rc != NO_ERROR) {
1203 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1204 return rc;
1205 }
1206
1207 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1208 if (rc != NO_ERROR) {
1209 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1210 return rc;
1211 }
1212
1213 clear_metadata_buffer(mParameters);
1214 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1215
1216 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1217 mParameters);
1218 if (rc != NO_ERROR) {
1219 LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1220 return rc;
1221 }
1222
1223 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1224 LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1225
1226 return rc;
1227}
1228
/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx : stream idx within postprocess_mask list to change
 *
 * RETURN     : NULL
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to the SW TNR feature bit.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // Accept either a hex ("0x...") or a decimal property value.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparseable property: leave the stream's mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes priority over LLVD when both bits are set.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor advertises it.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1296
1297/*==============================================================================
1298 * FUNCTION : updateFpsInPreviewBuffer
1299 *
1300 * DESCRIPTION: update FPS information in preview buffer.
1301 *
1302 * PARAMETERS :
1303 * @metadata : pointer to metadata buffer
1304 * @frame_number: frame_number to look for in pending buffer list
1305 *
1306 * RETURN : None
1307 *
1308 *==========================================================================*/
1309void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1310 uint32_t frame_number)
1311{
1312 // Mark all pending buffers for this particular request
1313 // with corresponding framerate information
1314 for (List<PendingBuffersInRequest>::iterator req =
1315 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1316 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1317 for(List<PendingBufferInfo>::iterator j =
1318 req->mPendingBufferList.begin();
1319 j != req->mPendingBufferList.end(); j++) {
1320 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1321 if ((req->frame_number == frame_number) &&
1322 (channel->getStreamTypeMask() &
1323 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1324 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1325 CAM_INTF_PARM_FPS_RANGE, metadata) {
1326 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1327 struct private_handle_t *priv_handle =
1328 (struct private_handle_t *)(*(j->buffer));
1329 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1330 }
1331 }
1332 }
1333 }
1334}
1335
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001336/*==============================================================================
1337 * FUNCTION : updateTimeStampInPendingBuffers
1338 *
1339 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1340 * of a frame number
1341 *
1342 * PARAMETERS :
1343 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1344 * @timestamp : timestamp to be set
1345 *
1346 * RETURN : None
1347 *
1348 *==========================================================================*/
1349void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1350 uint32_t frameNumber, nsecs_t timestamp)
1351{
1352 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1353 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1354 if (req->frame_number != frameNumber)
1355 continue;
1356
1357 for (auto k = req->mPendingBufferList.begin();
1358 k != req->mPendingBufferList.end(); k++ ) {
1359 struct private_handle_t *priv_handle =
1360 (struct private_handle_t *) (*(k->buffer));
1361 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1362 }
1363 }
1364 return;
1365}
1366
Thierry Strudel3d639192016-09-09 11:52:26 -07001367/*===========================================================================
1368 * FUNCTION : configureStreams
1369 *
1370 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1371 * and output streams.
1372 *
1373 * PARAMETERS :
1374 * @stream_list : streams to be configured
1375 *
1376 * RETURN :
1377 *
1378 *==========================================================================*/
1379int QCamera3HardwareInterface::configureStreams(
1380 camera3_stream_configuration_t *streamList)
1381{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001382 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001383 int rc = 0;
1384
1385 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001386 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001387 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001388 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001389
1390 return rc;
1391}
1392
1393/*===========================================================================
1394 * FUNCTION : configureStreamsPerfLocked
1395 *
1396 * DESCRIPTION: configureStreams while perfLock is held.
1397 *
1398 * PARAMETERS :
1399 * @stream_list : streams to be configured
1400 *
1401 * RETURN : int32_t type of status
1402 * NO_ERROR -- success
1403 * none-zero failure code
1404 *==========================================================================*/
1405int QCamera3HardwareInterface::configureStreamsPerfLocked(
1406 camera3_stream_configuration_t *streamList)
1407{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001408 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001409 int rc = 0;
1410
1411 // Sanity check stream_list
1412 if (streamList == NULL) {
1413 LOGE("NULL stream configuration");
1414 return BAD_VALUE;
1415 }
1416 if (streamList->streams == NULL) {
1417 LOGE("NULL stream list");
1418 return BAD_VALUE;
1419 }
1420
1421 if (streamList->num_streams < 1) {
1422 LOGE("Bad number of streams requested: %d",
1423 streamList->num_streams);
1424 return BAD_VALUE;
1425 }
1426
1427 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1428 LOGE("Maximum number of streams %d exceeded: %d",
1429 MAX_NUM_STREAMS, streamList->num_streams);
1430 return BAD_VALUE;
1431 }
1432
1433 mOpMode = streamList->operation_mode;
1434 LOGD("mOpMode: %d", mOpMode);
1435
1436 /* first invalidate all the steams in the mStreamList
1437 * if they appear again, they will be validated */
1438 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1439 it != mStreamInfo.end(); it++) {
1440 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1441 if (channel) {
1442 channel->stop();
1443 }
1444 (*it)->status = INVALID;
1445 }
1446
1447 if (mRawDumpChannel) {
1448 mRawDumpChannel->stop();
1449 delete mRawDumpChannel;
1450 mRawDumpChannel = NULL;
1451 }
1452
1453 if (mSupportChannel)
1454 mSupportChannel->stop();
1455
1456 if (mAnalysisChannel) {
1457 mAnalysisChannel->stop();
1458 }
1459 if (mMetadataChannel) {
1460 /* If content of mStreamInfo is not 0, there is metadata stream */
1461 mMetadataChannel->stop();
1462 }
1463 if (mChannelHandle) {
1464 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1465 mChannelHandle);
1466 LOGD("stopping channel %d", mChannelHandle);
1467 }
1468
1469 pthread_mutex_lock(&mMutex);
1470
1471 // Check state
1472 switch (mState) {
1473 case INITIALIZED:
1474 case CONFIGURED:
1475 case STARTED:
1476 /* valid state */
1477 break;
1478 default:
1479 LOGE("Invalid state %d", mState);
1480 pthread_mutex_unlock(&mMutex);
1481 return -ENODEV;
1482 }
1483
1484 /* Check whether we have video stream */
1485 m_bIs4KVideo = false;
1486 m_bIsVideo = false;
1487 m_bEisSupportedSize = false;
1488 m_bTnrEnabled = false;
1489 bool isZsl = false;
1490 uint32_t videoWidth = 0U;
1491 uint32_t videoHeight = 0U;
1492 size_t rawStreamCnt = 0;
1493 size_t stallStreamCnt = 0;
1494 size_t processedStreamCnt = 0;
1495 // Number of streams on ISP encoder path
1496 size_t numStreamsOnEncoder = 0;
1497 size_t numYuv888OnEncoder = 0;
1498 bool bYuv888OverrideJpeg = false;
1499 cam_dimension_t largeYuv888Size = {0, 0};
1500 cam_dimension_t maxViewfinderSize = {0, 0};
1501 bool bJpegExceeds4K = false;
1502 bool bJpegOnEncoder = false;
1503 bool bUseCommonFeatureMask = false;
1504 cam_feature_mask_t commonFeatureMask = 0;
1505 bool bSmallJpegSize = false;
1506 uint32_t width_ratio;
1507 uint32_t height_ratio;
1508 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1509 camera3_stream_t *inputStream = NULL;
1510 bool isJpeg = false;
1511 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001512 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001513
1514 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1515
1516 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001517 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001518 uint8_t eis_prop_set;
1519 uint32_t maxEisWidth = 0;
1520 uint32_t maxEisHeight = 0;
1521
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001522 // Initialize all instant AEC related variables
1523 mInstantAEC = false;
1524 mResetInstantAEC = false;
1525 mInstantAECSettledFrameNumber = 0;
1526 mAecSkipDisplayFrameBound = 0;
1527 mInstantAecFrameIdxCount = 0;
1528
Thierry Strudel3d639192016-09-09 11:52:26 -07001529 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1530
1531 size_t count = IS_TYPE_MAX;
1532 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1533 for (size_t i = 0; i < count; i++) {
1534 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001535 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1536 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001537 break;
1538 }
1539 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001540 count = CAM_OPT_STAB_MAX;
1541 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1542 for (size_t i = 0; i < count; i++) {
1543 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1544 oisSupported = true;
1545 break;
1546 }
1547 }
1548
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001549 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001550 maxEisWidth = MAX_EIS_WIDTH;
1551 maxEisHeight = MAX_EIS_HEIGHT;
1552 }
1553
1554 /* EIS setprop control */
1555 char eis_prop[PROPERTY_VALUE_MAX];
1556 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001557 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001558 eis_prop_set = (uint8_t)atoi(eis_prop);
1559
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001560 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001561 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1562
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001563 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1564 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1565
Thierry Strudel3d639192016-09-09 11:52:26 -07001566 /* stream configurations */
1567 for (size_t i = 0; i < streamList->num_streams; i++) {
1568 camera3_stream_t *newStream = streamList->streams[i];
1569 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1570 "height = %d, rotation = %d, usage = 0x%x",
1571 i, newStream->stream_type, newStream->format,
1572 newStream->width, newStream->height, newStream->rotation,
1573 newStream->usage);
1574 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1575 newStream->stream_type == CAMERA3_STREAM_INPUT){
1576 isZsl = true;
1577 }
1578 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1579 inputStream = newStream;
1580 }
1581
1582 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1583 isJpeg = true;
1584 jpegSize.width = newStream->width;
1585 jpegSize.height = newStream->height;
1586 if (newStream->width > VIDEO_4K_WIDTH ||
1587 newStream->height > VIDEO_4K_HEIGHT)
1588 bJpegExceeds4K = true;
1589 }
1590
1591 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1592 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1593 m_bIsVideo = true;
1594 videoWidth = newStream->width;
1595 videoHeight = newStream->height;
1596 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1597 (VIDEO_4K_HEIGHT <= newStream->height)) {
1598 m_bIs4KVideo = true;
1599 }
1600 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1601 (newStream->height <= maxEisHeight);
1602 }
1603 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1604 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1605 switch (newStream->format) {
1606 case HAL_PIXEL_FORMAT_BLOB:
1607 stallStreamCnt++;
1608 if (isOnEncoder(maxViewfinderSize, newStream->width,
1609 newStream->height)) {
1610 numStreamsOnEncoder++;
1611 bJpegOnEncoder = true;
1612 }
1613 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1614 newStream->width);
1615 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1616 newStream->height);;
1617 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1618 "FATAL: max_downscale_factor cannot be zero and so assert");
1619 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1620 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1621 LOGH("Setting small jpeg size flag to true");
1622 bSmallJpegSize = true;
1623 }
1624 break;
1625 case HAL_PIXEL_FORMAT_RAW10:
1626 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1627 case HAL_PIXEL_FORMAT_RAW16:
1628 rawStreamCnt++;
1629 break;
1630 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1631 processedStreamCnt++;
1632 if (isOnEncoder(maxViewfinderSize, newStream->width,
1633 newStream->height)) {
1634 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1635 !IS_USAGE_ZSL(newStream->usage)) {
1636 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1637 }
1638 numStreamsOnEncoder++;
1639 }
1640 break;
1641 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1642 processedStreamCnt++;
1643 if (isOnEncoder(maxViewfinderSize, newStream->width,
1644 newStream->height)) {
1645 // If Yuv888 size is not greater than 4K, set feature mask
1646 // to SUPERSET so that it support concurrent request on
1647 // YUV and JPEG.
1648 if (newStream->width <= VIDEO_4K_WIDTH &&
1649 newStream->height <= VIDEO_4K_HEIGHT) {
1650 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1651 }
1652 numStreamsOnEncoder++;
1653 numYuv888OnEncoder++;
1654 largeYuv888Size.width = newStream->width;
1655 largeYuv888Size.height = newStream->height;
1656 }
1657 break;
1658 default:
1659 processedStreamCnt++;
1660 if (isOnEncoder(maxViewfinderSize, newStream->width,
1661 newStream->height)) {
1662 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1663 numStreamsOnEncoder++;
1664 }
1665 break;
1666 }
1667
1668 }
1669 }
1670
1671 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1672 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1673 !m_bIsVideo) {
1674 m_bEisEnable = false;
1675 }
1676
1677 /* Logic to enable/disable TNR based on specific config size/etc.*/
1678 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1679 ((videoWidth == 1920 && videoHeight == 1080) ||
1680 (videoWidth == 1280 && videoHeight == 720)) &&
1681 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1682 m_bTnrEnabled = true;
1683
1684 /* Check if num_streams is sane */
1685 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1686 rawStreamCnt > MAX_RAW_STREAMS ||
1687 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1688 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1689 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1690 pthread_mutex_unlock(&mMutex);
1691 return -EINVAL;
1692 }
1693 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001694 if (isZsl && m_bIs4KVideo) {
1695 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001696 pthread_mutex_unlock(&mMutex);
1697 return -EINVAL;
1698 }
1699 /* Check if stream sizes are sane */
1700 if (numStreamsOnEncoder > 2) {
1701 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1702 pthread_mutex_unlock(&mMutex);
1703 return -EINVAL;
1704 } else if (1 < numStreamsOnEncoder){
1705 bUseCommonFeatureMask = true;
1706 LOGH("Multiple streams above max viewfinder size, common mask needed");
1707 }
1708
1709 /* Check if BLOB size is greater than 4k in 4k recording case */
1710 if (m_bIs4KVideo && bJpegExceeds4K) {
1711 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1712 pthread_mutex_unlock(&mMutex);
1713 return -EINVAL;
1714 }
1715
1716 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1717 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1718 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1719 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1720 // configurations:
1721 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1722 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1723 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1724 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1725 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1726 __func__);
1727 pthread_mutex_unlock(&mMutex);
1728 return -EINVAL;
1729 }
1730
1731 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1732 // the YUV stream's size is greater or equal to the JPEG size, set common
1733 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1734 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1735 jpegSize.width, jpegSize.height) &&
1736 largeYuv888Size.width > jpegSize.width &&
1737 largeYuv888Size.height > jpegSize.height) {
1738 bYuv888OverrideJpeg = true;
1739 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1740 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1741 }
1742
1743 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1744 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1745 commonFeatureMask);
1746 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1747 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1748
1749 rc = validateStreamDimensions(streamList);
1750 if (rc == NO_ERROR) {
1751 rc = validateStreamRotations(streamList);
1752 }
1753 if (rc != NO_ERROR) {
1754 LOGE("Invalid stream configuration requested!");
1755 pthread_mutex_unlock(&mMutex);
1756 return rc;
1757 }
1758
1759 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1760 for (size_t i = 0; i < streamList->num_streams; i++) {
1761 camera3_stream_t *newStream = streamList->streams[i];
1762 LOGH("newStream type = %d, stream format = %d "
1763 "stream size : %d x %d, stream rotation = %d",
1764 newStream->stream_type, newStream->format,
1765 newStream->width, newStream->height, newStream->rotation);
1766 //if the stream is in the mStreamList validate it
1767 bool stream_exists = false;
1768 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1769 it != mStreamInfo.end(); it++) {
1770 if ((*it)->stream == newStream) {
1771 QCamera3ProcessingChannel *channel =
1772 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1773 stream_exists = true;
1774 if (channel)
1775 delete channel;
1776 (*it)->status = VALID;
1777 (*it)->stream->priv = NULL;
1778 (*it)->channel = NULL;
1779 }
1780 }
1781 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1782 //new stream
1783 stream_info_t* stream_info;
1784 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1785 if (!stream_info) {
1786 LOGE("Could not allocate stream info");
1787 rc = -ENOMEM;
1788 pthread_mutex_unlock(&mMutex);
1789 return rc;
1790 }
1791 stream_info->stream = newStream;
1792 stream_info->status = VALID;
1793 stream_info->channel = NULL;
1794 mStreamInfo.push_back(stream_info);
1795 }
1796 /* Covers Opaque ZSL and API1 F/W ZSL */
1797 if (IS_USAGE_ZSL(newStream->usage)
1798 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1799 if (zslStream != NULL) {
1800 LOGE("Multiple input/reprocess streams requested!");
1801 pthread_mutex_unlock(&mMutex);
1802 return BAD_VALUE;
1803 }
1804 zslStream = newStream;
1805 }
1806 /* Covers YUV reprocess */
1807 if (inputStream != NULL) {
1808 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1809 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1810 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1811 && inputStream->width == newStream->width
1812 && inputStream->height == newStream->height) {
1813 if (zslStream != NULL) {
1814 /* This scenario indicates multiple YUV streams with same size
1815 * as input stream have been requested, since zsl stream handle
1816 * is solely use for the purpose of overriding the size of streams
1817 * which share h/w streams we will just make a guess here as to
1818 * which of the stream is a ZSL stream, this will be refactored
1819 * once we make generic logic for streams sharing encoder output
1820 */
1821 LOGH("Warning, Multiple ip/reprocess streams requested!");
1822 }
1823 zslStream = newStream;
1824 }
1825 }
1826 }
1827
1828 /* If a zsl stream is set, we know that we have configured at least one input or
1829 bidirectional stream */
1830 if (NULL != zslStream) {
1831 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1832 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1833 mInputStreamInfo.format = zslStream->format;
1834 mInputStreamInfo.usage = zslStream->usage;
1835 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1836 mInputStreamInfo.dim.width,
1837 mInputStreamInfo.dim.height,
1838 mInputStreamInfo.format, mInputStreamInfo.usage);
1839 }
1840
1841 cleanAndSortStreamInfo();
1842 if (mMetadataChannel) {
1843 delete mMetadataChannel;
1844 mMetadataChannel = NULL;
1845 }
1846 if (mSupportChannel) {
1847 delete mSupportChannel;
1848 mSupportChannel = NULL;
1849 }
1850
1851 if (mAnalysisChannel) {
1852 delete mAnalysisChannel;
1853 mAnalysisChannel = NULL;
1854 }
1855
1856 if (mDummyBatchChannel) {
1857 delete mDummyBatchChannel;
1858 mDummyBatchChannel = NULL;
1859 }
1860
1861 //Create metadata channel and initialize it
1862 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1863 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1864 gCamCapability[mCameraId]->color_arrangement);
1865 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1866 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001867 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 if (mMetadataChannel == NULL) {
1869 LOGE("failed to allocate metadata channel");
1870 rc = -ENOMEM;
1871 pthread_mutex_unlock(&mMutex);
1872 return rc;
1873 }
1874 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1875 if (rc < 0) {
1876 LOGE("metadata channel initialization failed");
1877 delete mMetadataChannel;
1878 mMetadataChannel = NULL;
1879 pthread_mutex_unlock(&mMutex);
1880 return rc;
1881 }
1882
Thierry Strudel3d639192016-09-09 11:52:26 -07001883 bool isRawStreamRequested = false;
1884 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1885 /* Allocate channel objects for the requested streams */
1886 for (size_t i = 0; i < streamList->num_streams; i++) {
1887 camera3_stream_t *newStream = streamList->streams[i];
1888 uint32_t stream_usage = newStream->usage;
1889 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1890 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1891 struct camera_info *p_info = NULL;
1892 pthread_mutex_lock(&gCamLock);
1893 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1894 pthread_mutex_unlock(&gCamLock);
1895 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1896 || IS_USAGE_ZSL(newStream->usage)) &&
1897 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1898 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1899 if (bUseCommonFeatureMask) {
1900 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1901 commonFeatureMask;
1902 } else {
1903 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1904 CAM_QCOM_FEATURE_NONE;
1905 }
1906
1907 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1908 LOGH("Input stream configured, reprocess config");
1909 } else {
1910 //for non zsl streams find out the format
1911 switch (newStream->format) {
1912 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1913 {
1914 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1915 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1916 /* add additional features to pp feature mask */
1917 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1918 mStreamConfigInfo.num_streams);
1919
1920 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1922 CAM_STREAM_TYPE_VIDEO;
1923 if (m_bTnrEnabled && m_bTnrVideo) {
1924 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1925 CAM_QCOM_FEATURE_CPP_TNR;
1926 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1927 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1928 ~CAM_QCOM_FEATURE_CDS;
1929 }
1930 } else {
1931 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1932 CAM_STREAM_TYPE_PREVIEW;
1933 if (m_bTnrEnabled && m_bTnrPreview) {
1934 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1935 CAM_QCOM_FEATURE_CPP_TNR;
1936 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1937 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1938 ~CAM_QCOM_FEATURE_CDS;
1939 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001940 if(!m_bSwTnrPreview) {
1941 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1942 ~CAM_QTI_FEATURE_SW_TNR;
1943 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001944 padding_info.width_padding = mSurfaceStridePadding;
1945 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001946 previewSize.width = (int32_t)newStream->width;
1947 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07001948 }
1949 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1950 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1951 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1952 newStream->height;
1953 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1954 newStream->width;
1955 }
1956 }
1957 break;
1958 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1959 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1960 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1961 if (bUseCommonFeatureMask)
1962 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1963 commonFeatureMask;
1964 else
1965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1966 CAM_QCOM_FEATURE_NONE;
1967 } else {
1968 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1969 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1970 }
1971 break;
1972 case HAL_PIXEL_FORMAT_BLOB:
1973 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1974 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1975 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1976 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1977 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1978 } else {
1979 if (bUseCommonFeatureMask &&
1980 isOnEncoder(maxViewfinderSize, newStream->width,
1981 newStream->height)) {
1982 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1983 } else {
1984 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1985 }
1986 }
1987 if (isZsl) {
1988 if (zslStream) {
1989 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1990 (int32_t)zslStream->width;
1991 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1992 (int32_t)zslStream->height;
1993 } else {
1994 LOGE("Error, No ZSL stream identified");
1995 pthread_mutex_unlock(&mMutex);
1996 return -EINVAL;
1997 }
1998 } else if (m_bIs4KVideo) {
1999 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2001 } else if (bYuv888OverrideJpeg) {
2002 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2003 (int32_t)largeYuv888Size.width;
2004 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2005 (int32_t)largeYuv888Size.height;
2006 }
2007 break;
2008 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2009 case HAL_PIXEL_FORMAT_RAW16:
2010 case HAL_PIXEL_FORMAT_RAW10:
2011 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2012 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2013 isRawStreamRequested = true;
2014 break;
2015 default:
2016 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2017 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2018 break;
2019 }
2020 }
2021
2022 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2023 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2024 gCamCapability[mCameraId]->color_arrangement);
2025
2026 if (newStream->priv == NULL) {
2027 //New stream, construct channel
2028 switch (newStream->stream_type) {
2029 case CAMERA3_STREAM_INPUT:
2030 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2031 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2032 break;
2033 case CAMERA3_STREAM_BIDIRECTIONAL:
2034 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2035 GRALLOC_USAGE_HW_CAMERA_WRITE;
2036 break;
2037 case CAMERA3_STREAM_OUTPUT:
2038 /* For video encoding stream, set read/write rarely
2039 * flag so that they may be set to un-cached */
2040 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2041 newStream->usage |=
2042 (GRALLOC_USAGE_SW_READ_RARELY |
2043 GRALLOC_USAGE_SW_WRITE_RARELY |
2044 GRALLOC_USAGE_HW_CAMERA_WRITE);
2045 else if (IS_USAGE_ZSL(newStream->usage))
2046 {
2047 LOGD("ZSL usage flag skipping");
2048 }
2049 else if (newStream == zslStream
2050 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2051 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2052 } else
2053 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2054 break;
2055 default:
2056 LOGE("Invalid stream_type %d", newStream->stream_type);
2057 break;
2058 }
2059
2060 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2061 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2062 QCamera3ProcessingChannel *channel = NULL;
2063 switch (newStream->format) {
2064 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2065 if ((newStream->usage &
2066 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2067 (streamList->operation_mode ==
2068 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2069 ) {
2070 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2071 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002072 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002073 this,
2074 newStream,
2075 (cam_stream_type_t)
2076 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2077 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2078 mMetadataChannel,
2079 0); //heap buffers are not required for HFR video channel
2080 if (channel == NULL) {
2081 LOGE("allocation of channel failed");
2082 pthread_mutex_unlock(&mMutex);
2083 return -ENOMEM;
2084 }
2085 //channel->getNumBuffers() will return 0 here so use
2086 //MAX_INFLIGH_HFR_REQUESTS
2087 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2088 newStream->priv = channel;
2089 LOGI("num video buffers in HFR mode: %d",
2090 MAX_INFLIGHT_HFR_REQUESTS);
2091 } else {
2092 /* Copy stream contents in HFR preview only case to create
2093 * dummy batch channel so that sensor streaming is in
2094 * HFR mode */
2095 if (!m_bIsVideo && (streamList->operation_mode ==
2096 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2097 mDummyBatchStream = *newStream;
2098 }
2099 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2100 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002101 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002102 this,
2103 newStream,
2104 (cam_stream_type_t)
2105 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2106 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2107 mMetadataChannel,
2108 MAX_INFLIGHT_REQUESTS);
2109 if (channel == NULL) {
2110 LOGE("allocation of channel failed");
2111 pthread_mutex_unlock(&mMutex);
2112 return -ENOMEM;
2113 }
2114 newStream->max_buffers = channel->getNumBuffers();
2115 newStream->priv = channel;
2116 }
2117 break;
2118 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2119 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2120 mChannelHandle,
2121 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002122 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002123 this,
2124 newStream,
2125 (cam_stream_type_t)
2126 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2127 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2128 mMetadataChannel);
2129 if (channel == NULL) {
2130 LOGE("allocation of YUV channel failed");
2131 pthread_mutex_unlock(&mMutex);
2132 return -ENOMEM;
2133 }
2134 newStream->max_buffers = channel->getNumBuffers();
2135 newStream->priv = channel;
2136 break;
2137 }
2138 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2139 case HAL_PIXEL_FORMAT_RAW16:
2140 case HAL_PIXEL_FORMAT_RAW10:
2141 mRawChannel = new QCamera3RawChannel(
2142 mCameraHandle->camera_handle, mChannelHandle,
2143 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002144 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002145 this, newStream,
2146 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2147 mMetadataChannel,
2148 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2149 if (mRawChannel == NULL) {
2150 LOGE("allocation of raw channel failed");
2151 pthread_mutex_unlock(&mMutex);
2152 return -ENOMEM;
2153 }
2154 newStream->max_buffers = mRawChannel->getNumBuffers();
2155 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2156 break;
2157 case HAL_PIXEL_FORMAT_BLOB:
2158 // Max live snapshot inflight buffer is 1. This is to mitigate
2159 // frame drop issues for video snapshot. The more buffers being
2160 // allocated, the more frame drops there are.
2161 mPictureChannel = new QCamera3PicChannel(
2162 mCameraHandle->camera_handle, mChannelHandle,
2163 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002164 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002165 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2166 m_bIs4KVideo, isZsl, mMetadataChannel,
2167 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2168 if (mPictureChannel == NULL) {
2169 LOGE("allocation of channel failed");
2170 pthread_mutex_unlock(&mMutex);
2171 return -ENOMEM;
2172 }
2173 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2174 newStream->max_buffers = mPictureChannel->getNumBuffers();
2175 mPictureChannel->overrideYuvSize(
2176 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2177 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2178 break;
2179
2180 default:
2181 LOGE("not a supported format 0x%x", newStream->format);
2182 break;
2183 }
2184 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2185 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2186 } else {
2187 LOGE("Error, Unknown stream type");
2188 pthread_mutex_unlock(&mMutex);
2189 return -EINVAL;
2190 }
2191
2192 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2193 if (channel != NULL && channel->isUBWCEnabled()) {
2194 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002195 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2196 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002197 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2198 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2199 }
2200 }
2201
2202 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2203 it != mStreamInfo.end(); it++) {
2204 if ((*it)->stream == newStream) {
2205 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2206 break;
2207 }
2208 }
2209 } else {
2210 // Channel already exists for this stream
2211 // Do nothing for now
2212 }
2213 padding_info = gCamCapability[mCameraId]->padding_info;
2214
2215 /* Do not add entries for input stream in metastream info
2216 * since there is no real stream associated with it
2217 */
2218 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2219 mStreamConfigInfo.num_streams++;
2220 }
2221
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002222 // Create analysis stream all the time, even when h/w support is not available
2223 {
2224 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2225 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2226 gCamCapability[mCameraId]->color_arrangement);
2227 cam_analysis_info_t analysisInfo;
2228 int32_t ret = NO_ERROR;
2229 ret = mCommon.getAnalysisInfo(
2230 FALSE,
2231 analysisFeatureMask,
2232 &analysisInfo);
2233 if (ret == NO_ERROR) {
2234 cam_dimension_t analysisDim;
2235 analysisDim = mCommon.getMatchingDimension(previewSize,
2236 analysisInfo.analysis_recommended_res);
2237
2238 mAnalysisChannel = new QCamera3SupportChannel(
2239 mCameraHandle->camera_handle,
2240 mChannelHandle,
2241 mCameraHandle->ops,
2242 &analysisInfo.analysis_padding_info,
2243 analysisFeatureMask,
2244 CAM_STREAM_TYPE_ANALYSIS,
2245 &analysisDim,
2246 (analysisInfo.analysis_format
2247 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2248 : CAM_FORMAT_YUV_420_NV21),
2249 analysisInfo.hw_analysis_supported,
2250 gCamCapability[mCameraId]->color_arrangement,
2251 this,
2252 0); // force buffer count to 0
2253 } else {
2254 LOGW("getAnalysisInfo failed, ret = %d", ret);
2255 }
2256 if (!mAnalysisChannel) {
2257 LOGW("Analysis channel cannot be created");
2258 }
2259 }
2260
Thierry Strudel3d639192016-09-09 11:52:26 -07002261 //RAW DUMP channel
2262 if (mEnableRawDump && isRawStreamRequested == false){
2263 cam_dimension_t rawDumpSize;
2264 rawDumpSize = getMaxRawSize(mCameraId);
2265 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2266 setPAAFSupport(rawDumpFeatureMask,
2267 CAM_STREAM_TYPE_RAW,
2268 gCamCapability[mCameraId]->color_arrangement);
2269 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2270 mChannelHandle,
2271 mCameraHandle->ops,
2272 rawDumpSize,
2273 &padding_info,
2274 this, rawDumpFeatureMask);
2275 if (!mRawDumpChannel) {
2276 LOGE("Raw Dump channel cannot be created");
2277 pthread_mutex_unlock(&mMutex);
2278 return -ENOMEM;
2279 }
2280 }
2281
2282
2283 if (mAnalysisChannel) {
2284 cam_analysis_info_t analysisInfo;
2285 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2286 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2287 CAM_STREAM_TYPE_ANALYSIS;
2288 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2289 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2290 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2291 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2292 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002293 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002294 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2295 &analysisInfo);
2296 if (rc != NO_ERROR) {
2297 LOGE("getAnalysisInfo failed, ret = %d", rc);
2298 pthread_mutex_unlock(&mMutex);
2299 return rc;
2300 }
2301 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002302 mCommon.getMatchingDimension(previewSize,
2303 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002304 mStreamConfigInfo.num_streams++;
2305 }
2306
2307 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2308 cam_analysis_info_t supportInfo;
2309 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2310 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2311 setPAAFSupport(callbackFeatureMask,
2312 CAM_STREAM_TYPE_CALLBACK,
2313 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002314 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002315 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002316 if (ret != NO_ERROR) {
2317 /* Ignore the error for Mono camera
2318 * because the PAAF bit mask is only set
2319 * for CAM_STREAM_TYPE_ANALYSIS stream type
2320 */
2321 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2322 LOGW("getAnalysisInfo failed, ret = %d", ret);
2323 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002324 }
2325 mSupportChannel = new QCamera3SupportChannel(
2326 mCameraHandle->camera_handle,
2327 mChannelHandle,
2328 mCameraHandle->ops,
2329 &gCamCapability[mCameraId]->padding_info,
2330 callbackFeatureMask,
2331 CAM_STREAM_TYPE_CALLBACK,
2332 &QCamera3SupportChannel::kDim,
2333 CAM_FORMAT_YUV_420_NV21,
2334 supportInfo.hw_analysis_supported,
2335 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002336 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 if (!mSupportChannel) {
2338 LOGE("dummy channel cannot be created");
2339 pthread_mutex_unlock(&mMutex);
2340 return -ENOMEM;
2341 }
2342 }
2343
2344 if (mSupportChannel) {
2345 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2346 QCamera3SupportChannel::kDim;
2347 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2348 CAM_STREAM_TYPE_CALLBACK;
2349 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2350 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2351 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2352 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2353 gCamCapability[mCameraId]->color_arrangement);
2354 mStreamConfigInfo.num_streams++;
2355 }
2356
2357 if (mRawDumpChannel) {
2358 cam_dimension_t rawSize;
2359 rawSize = getMaxRawSize(mCameraId);
2360 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2361 rawSize;
2362 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2363 CAM_STREAM_TYPE_RAW;
2364 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2365 CAM_QCOM_FEATURE_NONE;
2366 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2367 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2368 gCamCapability[mCameraId]->color_arrangement);
2369 mStreamConfigInfo.num_streams++;
2370 }
2371 /* In HFR mode, if video stream is not added, create a dummy channel so that
2372 * ISP can create a batch mode even for preview only case. This channel is
2373 * never 'start'ed (no stream-on), it is only 'initialized' */
2374 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2375 !m_bIsVideo) {
2376 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2377 setPAAFSupport(dummyFeatureMask,
2378 CAM_STREAM_TYPE_VIDEO,
2379 gCamCapability[mCameraId]->color_arrangement);
2380 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2381 mChannelHandle,
2382 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002383 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002384 this,
2385 &mDummyBatchStream,
2386 CAM_STREAM_TYPE_VIDEO,
2387 dummyFeatureMask,
2388 mMetadataChannel);
2389 if (NULL == mDummyBatchChannel) {
2390 LOGE("creation of mDummyBatchChannel failed."
2391 "Preview will use non-hfr sensor mode ");
2392 }
2393 }
2394 if (mDummyBatchChannel) {
2395 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2396 mDummyBatchStream.width;
2397 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2398 mDummyBatchStream.height;
2399 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2400 CAM_STREAM_TYPE_VIDEO;
2401 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2402 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2403 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2404 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2405 gCamCapability[mCameraId]->color_arrangement);
2406 mStreamConfigInfo.num_streams++;
2407 }
2408
2409 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2410 mStreamConfigInfo.buffer_info.max_buffers =
2411 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2412
2413 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2414 for (pendingRequestIterator i = mPendingRequestsList.begin();
2415 i != mPendingRequestsList.end();) {
2416 i = erasePendingRequest(i);
2417 }
2418 mPendingFrameDropList.clear();
2419 // Initialize/Reset the pending buffers list
2420 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2421 req.mPendingBufferList.clear();
2422 }
2423 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2424
2425 mPendingReprocessResultList.clear();
2426
2427 mCurJpegMeta.clear();
2428 //Get min frame duration for this streams configuration
2429 deriveMinFrameDuration();
2430
2431 // Update state
2432 mState = CONFIGURED;
2433
2434 pthread_mutex_unlock(&mMutex);
2435
2436 return rc;
2437}
2438
2439/*===========================================================================
2440 * FUNCTION : validateCaptureRequest
2441 *
2442 * DESCRIPTION: validate a capture request from camera service
2443 *
2444 * PARAMETERS :
2445 * @request : request from framework to process
2446 *
2447 * RETURN :
2448 *
2449 *==========================================================================*/
2450int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002451 camera3_capture_request_t *request,
2452 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002453{
2454 ssize_t idx = 0;
2455 const camera3_stream_buffer_t *b;
2456 CameraMetadata meta;
2457
2458 /* Sanity check the request */
2459 if (request == NULL) {
2460 LOGE("NULL capture request");
2461 return BAD_VALUE;
2462 }
2463
2464 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2465 /*settings cannot be null for the first request*/
2466 return BAD_VALUE;
2467 }
2468
2469 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002470 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2471 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002472 LOGE("Request %d: No output buffers provided!",
2473 __FUNCTION__, frameNumber);
2474 return BAD_VALUE;
2475 }
2476 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2477 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2478 request->num_output_buffers, MAX_NUM_STREAMS);
2479 return BAD_VALUE;
2480 }
2481 if (request->input_buffer != NULL) {
2482 b = request->input_buffer;
2483 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2484 LOGE("Request %d: Buffer %ld: Status not OK!",
2485 frameNumber, (long)idx);
2486 return BAD_VALUE;
2487 }
2488 if (b->release_fence != -1) {
2489 LOGE("Request %d: Buffer %ld: Has a release fence!",
2490 frameNumber, (long)idx);
2491 return BAD_VALUE;
2492 }
2493 if (b->buffer == NULL) {
2494 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2495 frameNumber, (long)idx);
2496 return BAD_VALUE;
2497 }
2498 }
2499
2500 // Validate all buffers
2501 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002502 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 QCamera3ProcessingChannel *channel =
2504 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2505 if (channel == NULL) {
2506 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2507 frameNumber, (long)idx);
2508 return BAD_VALUE;
2509 }
2510 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2511 LOGE("Request %d: Buffer %ld: Status not OK!",
2512 frameNumber, (long)idx);
2513 return BAD_VALUE;
2514 }
2515 if (b->release_fence != -1) {
2516 LOGE("Request %d: Buffer %ld: Has a release fence!",
2517 frameNumber, (long)idx);
2518 return BAD_VALUE;
2519 }
2520 if (b->buffer == NULL) {
2521 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2522 frameNumber, (long)idx);
2523 return BAD_VALUE;
2524 }
2525 if (*(b->buffer) == NULL) {
2526 LOGE("Request %d: Buffer %ld: NULL private handle!",
2527 frameNumber, (long)idx);
2528 return BAD_VALUE;
2529 }
2530 idx++;
2531 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002533 return NO_ERROR;
2534}
2535
2536/*===========================================================================
2537 * FUNCTION : deriveMinFrameDuration
2538 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2540 * on currently configured streams.
2541 *
2542 * PARAMETERS : NONE
2543 *
2544 * RETURN : NONE
2545 *
2546 *==========================================================================*/
2547void QCamera3HardwareInterface::deriveMinFrameDuration()
2548{
2549 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2550
2551 maxJpegDim = 0;
2552 maxProcessedDim = 0;
2553 maxRawDim = 0;
2554
2555 // Figure out maximum jpeg, processed, and raw dimensions
2556 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2557 it != mStreamInfo.end(); it++) {
2558
2559 // Input stream doesn't have valid stream_type
2560 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2561 continue;
2562
2563 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2564 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2565 if (dimension > maxJpegDim)
2566 maxJpegDim = dimension;
2567 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2568 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2569 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2570 if (dimension > maxRawDim)
2571 maxRawDim = dimension;
2572 } else {
2573 if (dimension > maxProcessedDim)
2574 maxProcessedDim = dimension;
2575 }
2576 }
2577
2578 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2579 MAX_SIZES_CNT);
2580
2581 //Assume all jpeg dimensions are in processed dimensions.
2582 if (maxJpegDim > maxProcessedDim)
2583 maxProcessedDim = maxJpegDim;
2584 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2585 if (maxProcessedDim > maxRawDim) {
2586 maxRawDim = INT32_MAX;
2587
2588 for (size_t i = 0; i < count; i++) {
2589 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2590 gCamCapability[mCameraId]->raw_dim[i].height;
2591 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2592 maxRawDim = dimension;
2593 }
2594 }
2595
2596 //Find minimum durations for processed, jpeg, and raw
2597 for (size_t i = 0; i < count; i++) {
2598 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2599 gCamCapability[mCameraId]->raw_dim[i].height) {
2600 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2601 break;
2602 }
2603 }
2604 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2605 for (size_t i = 0; i < count; i++) {
2606 if (maxProcessedDim ==
2607 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2608 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2609 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2610 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2611 break;
2612 }
2613 }
2614}
2615
2616/*===========================================================================
2617 * FUNCTION : getMinFrameDuration
2618 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame
 *              durations and current request configuration.
 *
 * PARAMETERS : @request: request sent by the frameworks
 *
 * RETURN : min frame duration for a particular request
2625 *
2626 *==========================================================================*/
2627int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2628{
2629 bool hasJpegStream = false;
2630 bool hasRawStream = false;
2631 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2632 const camera3_stream_t *stream = request->output_buffers[i].stream;
2633 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2634 hasJpegStream = true;
2635 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2636 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2637 stream->format == HAL_PIXEL_FORMAT_RAW16)
2638 hasRawStream = true;
2639 }
2640
2641 if (!hasJpegStream)
2642 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2643 else
2644 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2645}
2646
2647/*===========================================================================
2648 * FUNCTION : handleBuffersDuringFlushLock
2649 *
2650 * DESCRIPTION: Account for buffers returned from back-end during flush
2651 * This function is executed while mMutex is held by the caller.
2652 *
2653 * PARAMETERS :
2654 * @buffer: image buffer for the callback
2655 *
2656 * RETURN :
2657 *==========================================================================*/
2658void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2659{
2660 bool buffer_found = false;
2661 for (List<PendingBuffersInRequest>::iterator req =
2662 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2663 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2664 for (List<PendingBufferInfo>::iterator i =
2665 req->mPendingBufferList.begin();
2666 i != req->mPendingBufferList.end(); i++) {
2667 if (i->buffer == buffer->buffer) {
2668 mPendingBuffersMap.numPendingBufsAtFlush--;
2669 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2670 buffer->buffer, req->frame_number,
2671 mPendingBuffersMap.numPendingBufsAtFlush);
2672 buffer_found = true;
2673 break;
2674 }
2675 }
2676 if (buffer_found) {
2677 break;
2678 }
2679 }
2680 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2681 //signal the flush()
2682 LOGD("All buffers returned to HAL. Continue flush");
2683 pthread_cond_signal(&mBuffersCond);
2684 }
2685}
2686
2687
2688/*===========================================================================
2689 * FUNCTION : handlePendingReprocResults
2690 *
2691 * DESCRIPTION: check and notify on any pending reprocess results
2692 *
2693 * PARAMETERS :
2694 * @frame_number : Pending request frame number
2695 *
2696 * RETURN : int32_t type of status
2697 * NO_ERROR -- success
2698 * none-zero failure code
2699 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a delayed reprocess result stashed for this frame number.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the notify message that was held back with this result.
            orchestrateNotify(&j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching pending request so its input buffer and
            // settings can be attached to the capture result.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Assemble the final capture result: the single stored
                    // reprocess output buffer plus the request's metadata.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    orchestrateResult(&result);

                    erasePendingRequest(k);
                    break;
                }
            }
            // erase(j) invalidates j, but the immediate break ensures the
            // iterator is never used afterwards.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2738
2739/*===========================================================================
2740 * FUNCTION : handleBatchMetadata
2741 *
2742 * DESCRIPTION: Handles metadata buffer callback in batch mode
2743 *
2744 * PARAMETERS : @metadata_buf: metadata buffer
2745 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2746 * the meta buf in this method
2747 *
2748 * RETURN :
2749 *
2750 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Raw pointers into the metadata buffer; NULL when an entry is absent.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Do not return: the loop below still runs once so that
        // handleMetadataWithLock can do pipeline-depth accounting.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first frame
        // number of the batch via mPendingBatchMap.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Same mapping for the regular frame number of the batch.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is fully accounted for once its regular frame number
        // arrives, so its map entry can be dropped here.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One loop iteration per frame in the batch; diffs beyond the HFR
        // batch size indicate a bookkeeping problem and are logged.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    // Patch the interpolated urgent frame number into the
                    // shared metadata buffer before delivering it.
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: spread frames evenly across the batch
                //interval using the HFR video frame rate.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2915
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002916void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2917 camera3_error_msg_code_t errorCode)
2918{
2919 camera3_notify_msg_t notify_msg;
2920 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2921 notify_msg.type = CAMERA3_MSG_ERROR;
2922 notify_msg.message.error.error_code = errorCode;
2923 notify_msg.message.error.error_stream = NULL;
2924 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002925 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002926
2927 return;
2928}
Thierry Strudel3d639192016-09-09 11:52:26 -07002929/*===========================================================================
2930 * FUNCTION : handleMetadataWithLock
2931 *
2932 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2933 *
2934 * PARAMETERS : @metadata_buf: metadata buffer
2935 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2936 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002937 * @firstMetadataInBatch: Boolean to indicate whether this is the
2938 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002939 *
2940 * RETURN :
2941 *
2942 *==========================================================================*/
2943void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002944 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2945 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002946{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002947 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002948 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2949 //during flush do not send metadata from this thread
2950 LOGD("not sending metadata during flush or when mState is error");
2951 if (free_and_bufdone_meta_buf) {
2952 mMetadataChannel->bufDone(metadata_buf);
2953 free(metadata_buf);
2954 }
2955 return;
2956 }
2957
2958 //not in flush
2959 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2960 int32_t frame_number_valid, urgent_frame_number_valid;
2961 uint32_t frame_number, urgent_frame_number;
2962 int64_t capture_time;
2963 nsecs_t currentSysTime;
2964
2965 int32_t *p_frame_number_valid =
2966 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2967 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2968 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2969 int32_t *p_urgent_frame_number_valid =
2970 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2971 uint32_t *p_urgent_frame_number =
2972 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2973 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2974 metadata) {
2975 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2976 *p_frame_number_valid, *p_frame_number);
2977 }
2978
2979 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2980 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2981 LOGE("Invalid metadata");
2982 if (free_and_bufdone_meta_buf) {
2983 mMetadataChannel->bufDone(metadata_buf);
2984 free(metadata_buf);
2985 }
2986 goto done_metadata;
2987 }
2988 frame_number_valid = *p_frame_number_valid;
2989 frame_number = *p_frame_number;
2990 capture_time = *p_capture_time;
2991 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2992 urgent_frame_number = *p_urgent_frame_number;
2993 currentSysTime = systemTime(CLOCK_MONOTONIC);
2994
2995 // Detect if buffers from any requests are overdue
2996 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2997 if ( (currentSysTime - req.timestamp) >
2998 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
2999 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003000 assert(missed.stream->priv);
3001 if (missed.stream->priv) {
3002 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3003 assert(ch->mStreams[0]);
3004 if (ch->mStreams[0]) {
3005 LOGE("Cancel missing frame = %d, buffer = %p,"
3006 "stream type = %d, stream format = %d",
3007 req.frame_number, missed.buffer,
3008 ch->mStreams[0]->getMyType(), missed.stream->format);
3009 ch->timeoutFrame(req.frame_number);
3010 }
3011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003012 }
3013 }
3014 }
3015 //Partial result on process_capture_result for timestamp
3016 if (urgent_frame_number_valid) {
3017 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3018 urgent_frame_number, capture_time);
3019
3020 //Recieved an urgent Frame Number, handle it
3021 //using partial results
3022 for (pendingRequestIterator i =
3023 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3024 LOGD("Iterator Frame = %d urgent frame = %d",
3025 i->frame_number, urgent_frame_number);
3026
3027 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3028 (i->partial_result_cnt == 0)) {
3029 LOGE("Error: HAL missed urgent metadata for frame number %d",
3030 i->frame_number);
3031 }
3032
3033 if (i->frame_number == urgent_frame_number &&
3034 i->bUrgentReceived == 0) {
3035
3036 camera3_capture_result_t result;
3037 memset(&result, 0, sizeof(camera3_capture_result_t));
3038
3039 i->partial_result_cnt++;
3040 i->bUrgentReceived = 1;
3041 // Extract 3A metadata
3042 result.result =
3043 translateCbUrgentMetadataToResultMetadata(metadata);
3044 // Populate metadata result
3045 result.frame_number = urgent_frame_number;
3046 result.num_output_buffers = 0;
3047 result.output_buffers = NULL;
3048 result.partial_result = i->partial_result_cnt;
3049
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003050 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003051 LOGD("urgent frame_number = %u, capture_time = %lld",
3052 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003053 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3054 // Instant AEC settled for this frame.
3055 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3056 mInstantAECSettledFrameNumber = urgent_frame_number;
3057 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 free_camera_metadata((camera_metadata_t *)result.result);
3059 break;
3060 }
3061 }
3062 }
3063
3064 if (!frame_number_valid) {
3065 LOGD("Not a valid normal frame number, used as SOF only");
3066 if (free_and_bufdone_meta_buf) {
3067 mMetadataChannel->bufDone(metadata_buf);
3068 free(metadata_buf);
3069 }
3070 goto done_metadata;
3071 }
3072 LOGH("valid frame_number = %u, capture_time = %lld",
3073 frame_number, capture_time);
3074
3075 for (pendingRequestIterator i = mPendingRequestsList.begin();
3076 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3077 // Flush out all entries with less or equal frame numbers.
3078
3079 camera3_capture_result_t result;
3080 memset(&result, 0, sizeof(camera3_capture_result_t));
3081
3082 LOGD("frame_number in the list is %u", i->frame_number);
3083 i->partial_result_cnt++;
3084 result.partial_result = i->partial_result_cnt;
3085
3086 // Check whether any stream buffer corresponding to this is dropped or not
3087 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003088 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3089 if (p_cam_frame_drop ||
3090 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003091 /* Clear notify_msg structure */
3092 camera3_notify_msg_t notify_msg;
3093 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3094 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3095 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003096 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003097 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3098 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003099 if (p_cam_frame_drop) {
3100 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003101 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003102 // Got the stream ID for drop frame.
3103 dropFrame = true;
3104 break;
3105 }
3106 }
3107 } else {
3108 // This is instant AEC case.
3109 // For instant AEC drop the stream untill AEC is settled.
3110 dropFrame = true;
3111 }
3112 if (dropFrame) {
3113 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3114 if (p_cam_frame_drop) {
3115 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003116 LOGE("Start of reporting error frame#=%u, streamID=%u",
3117 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003118 } else {
3119 // For instant AEC, inform frame drop and frame number
3120 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3121 "AEC settled frame number = %u",
3122 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3123 }
3124 notify_msg.type = CAMERA3_MSG_ERROR;
3125 notify_msg.message.error.frame_number = i->frame_number;
3126 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3127 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003128 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003129 if (p_cam_frame_drop) {
3130 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003131 LOGE("End of reporting error frame#=%u, streamID=%u",
3132 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003133 } else {
3134 // For instant AEC, inform frame drop and frame number
3135 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3136 "AEC settled frame number = %u",
3137 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3138 }
3139 PendingFrameDropInfo PendingFrameDrop;
3140 PendingFrameDrop.frame_number=i->frame_number;
3141 PendingFrameDrop.stream_ID = streamID;
3142 // Add the Frame drop info to mPendingFrameDropList
3143 mPendingFrameDropList.push_back(PendingFrameDrop);
3144 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003145 }
3146 }
3147
3148 // Send empty metadata with already filled buffers for dropped metadata
3149 // and send valid metadata with already filled buffers for current metadata
3150 /* we could hit this case when we either
3151 * 1. have a pending reprocess request or
3152 * 2. miss a metadata buffer callback */
3153 if (i->frame_number < frame_number) {
3154 if (i->input_buffer) {
3155 /* this will be handled in handleInputBufferWithLock */
3156 i++;
3157 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003158 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003159
3160 mPendingLiveRequest--;
3161
3162 CameraMetadata dummyMetadata;
3163 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3164 result.result = dummyMetadata.release();
3165
3166 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003167 }
3168 } else {
3169 mPendingLiveRequest--;
3170 /* Clear notify_msg structure */
3171 camera3_notify_msg_t notify_msg;
3172 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3173
3174 // Send shutter notify to frameworks
3175 notify_msg.type = CAMERA3_MSG_SHUTTER;
3176 notify_msg.message.shutter.frame_number = i->frame_number;
3177 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003179
3180 i->timestamp = capture_time;
3181
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003182 /* Set the timestamp in display metadata so that clients aware of
3183 private_handle such as VT can use this un-modified timestamps.
3184 Camera framework is unaware of this timestamp and cannot change this */
3185 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3186
Thierry Strudel3d639192016-09-09 11:52:26 -07003187 // Find channel requiring metadata, meaning internal offline postprocess
3188 // is needed.
3189 //TODO: for now, we don't support two streams requiring metadata at the same time.
3190 // (because we are not making copies, and metadata buffer is not reference counted.
3191 bool internalPproc = false;
3192 for (pendingBufferIterator iter = i->buffers.begin();
3193 iter != i->buffers.end(); iter++) {
3194 if (iter->need_metadata) {
3195 internalPproc = true;
3196 QCamera3ProcessingChannel *channel =
3197 (QCamera3ProcessingChannel *)iter->stream->priv;
3198 channel->queueReprocMetadata(metadata_buf);
3199 break;
3200 }
3201 }
3202
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003203 for (auto itr = i->internalRequestList.begin();
3204 itr != i->internalRequestList.end(); itr++) {
3205 if (itr->need_metadata) {
3206 internalPproc = true;
3207 QCamera3ProcessingChannel *channel =
3208 (QCamera3ProcessingChannel *)itr->stream->priv;
3209 channel->queueReprocMetadata(metadata_buf);
3210 break;
3211 }
3212 }
3213
3214
Thierry Strudel3d639192016-09-09 11:52:26 -07003215 result.result = translateFromHalMetadata(metadata,
3216 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Samuel Ha68ba5172016-12-15 18:41:12 -08003217 i->capture_intent,
3218 /* DevCamDebug metadata translateFromHalMetadata function call*/
3219 i->DevCamDebug_meta_enable,
3220 /* DevCamDebug metadata end */
3221 internalPproc, i->fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003222 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003223
3224 saveExifParams(metadata);
3225
3226 if (i->blob_request) {
3227 {
3228 //Dump tuning metadata if enabled and available
3229 char prop[PROPERTY_VALUE_MAX];
3230 memset(prop, 0, sizeof(prop));
3231 property_get("persist.camera.dumpmetadata", prop, "0");
3232 int32_t enabled = atoi(prop);
3233 if (enabled && metadata->is_tuning_params_valid) {
3234 dumpMetadataToFile(metadata->tuning_params,
3235 mMetaFrameCount,
3236 enabled,
3237 "Snapshot",
3238 frame_number);
3239 }
3240 }
3241 }
3242
3243 if (!internalPproc) {
3244 LOGD("couldn't find need_metadata for this metadata");
3245 // Return metadata buffer
3246 if (free_and_bufdone_meta_buf) {
3247 mMetadataChannel->bufDone(metadata_buf);
3248 free(metadata_buf);
3249 }
3250 }
3251 }
3252 if (!result.result) {
3253 LOGE("metadata is NULL");
3254 }
3255 result.frame_number = i->frame_number;
3256 result.input_buffer = i->input_buffer;
3257 result.num_output_buffers = 0;
3258 result.output_buffers = NULL;
3259 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3260 j != i->buffers.end(); j++) {
3261 if (j->buffer) {
3262 result.num_output_buffers++;
3263 }
3264 }
3265
3266 updateFpsInPreviewBuffer(metadata, i->frame_number);
3267
3268 if (result.num_output_buffers > 0) {
3269 camera3_stream_buffer_t *result_buffers =
3270 new camera3_stream_buffer_t[result.num_output_buffers];
3271 if (result_buffers != NULL) {
3272 size_t result_buffers_idx = 0;
3273 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3274 j != i->buffers.end(); j++) {
3275 if (j->buffer) {
3276 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3277 m != mPendingFrameDropList.end(); m++) {
3278 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3279 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3280 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3281 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3282 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3283 frame_number, streamID);
3284 m = mPendingFrameDropList.erase(m);
3285 break;
3286 }
3287 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003288 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003289 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3290 result_buffers[result_buffers_idx++] = *(j->buffer);
3291 free(j->buffer);
3292 j->buffer = NULL;
3293 }
3294 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003295
Thierry Strudel3d639192016-09-09 11:52:26 -07003296 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003297 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003298 LOGD("meta frame_number = %u, capture_time = %lld",
3299 result.frame_number, i->timestamp);
3300 free_camera_metadata((camera_metadata_t *)result.result);
3301 delete[] result_buffers;
3302 }else {
3303 LOGE("Fatal error: out of memory");
3304 }
3305 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003306 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003307 LOGD("meta frame_number = %u, capture_time = %lld",
3308 result.frame_number, i->timestamp);
3309 free_camera_metadata((camera_metadata_t *)result.result);
3310 }
3311
3312 i = erasePendingRequest(i);
3313
3314 if (!mPendingReprocessResultList.empty()) {
3315 handlePendingReprocResults(frame_number + 1);
3316 }
3317 }
3318
3319done_metadata:
3320 for (pendingRequestIterator i = mPendingRequestsList.begin();
3321 i != mPendingRequestsList.end() ;i++) {
3322 i->pipeline_depth++;
3323 }
3324 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3325 unblockRequestIfNecessary();
3326}
3327
3328/*===========================================================================
3329 * FUNCTION : hdrPlusPerfLock
3330 *
3331 * DESCRIPTION: perf lock for HDR+ using custom intent
3332 *
3333 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3334 *
3335 * RETURN : None
3336 *
3337 *==========================================================================*/
3338void QCamera3HardwareInterface::hdrPlusPerfLock(
3339 mm_camera_super_buf_t *metadata_buf)
3340{
3341 if (NULL == metadata_buf) {
3342 LOGE("metadata_buf is NULL");
3343 return;
3344 }
3345 metadata_buffer_t *metadata =
3346 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3347 int32_t *p_frame_number_valid =
3348 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3349 uint32_t *p_frame_number =
3350 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3351
3352 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3353 LOGE("%s: Invalid metadata", __func__);
3354 return;
3355 }
3356
3357 //acquire perf lock for 5 sec after the last HDR frame is captured
3358 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3359 if ((p_frame_number != NULL) &&
3360 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003361 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003362 }
3363 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003364}
3365
3366/*===========================================================================
3367 * FUNCTION : handleInputBufferWithLock
3368 *
3369 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3370 *
3371 * PARAMETERS : @frame_number: frame number of the input buffer
3372 *
3373 * RETURN :
3374 *
3375 *==========================================================================*/
3376void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3377{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003378 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003379 pendingRequestIterator i = mPendingRequestsList.begin();
3380 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3381 i++;
3382 }
3383 if (i != mPendingRequestsList.end() && i->input_buffer) {
3384 //found the right request
3385 if (!i->shutter_notified) {
3386 CameraMetadata settings;
3387 camera3_notify_msg_t notify_msg;
3388 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3389 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3390 if(i->settings) {
3391 settings = i->settings;
3392 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3393 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3394 } else {
3395 LOGE("No timestamp in input settings! Using current one.");
3396 }
3397 } else {
3398 LOGE("Input settings missing!");
3399 }
3400
3401 notify_msg.type = CAMERA3_MSG_SHUTTER;
3402 notify_msg.message.shutter.frame_number = frame_number;
3403 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003404 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003405 i->shutter_notified = true;
3406 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3407 i->frame_number, notify_msg.message.shutter.timestamp);
3408 }
3409
3410 if (i->input_buffer->release_fence != -1) {
3411 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3412 close(i->input_buffer->release_fence);
3413 if (rc != OK) {
3414 LOGE("input buffer sync wait failed %d", rc);
3415 }
3416 }
3417
3418 camera3_capture_result result;
3419 memset(&result, 0, sizeof(camera3_capture_result));
3420 result.frame_number = frame_number;
3421 result.result = i->settings;
3422 result.input_buffer = i->input_buffer;
3423 result.partial_result = PARTIAL_RESULT_COUNT;
3424
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003425 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 LOGD("Input request metadata and input buffer frame_number = %u",
3427 i->frame_number);
3428 i = erasePendingRequest(i);
3429 } else {
3430 LOGE("Could not find input request for frame number %d", frame_number);
3431 }
3432}
3433
3434/*===========================================================================
3435 * FUNCTION : handleBufferWithLock
3436 *
3437 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3438 *
3439 * PARAMETERS : @buffer: image buffer for the callback
3440 * @frame_number: frame number of the image buffer
3441 *
3442 * RETURN :
3443 *
3444 *==========================================================================*/
/*
 * Handles an image buffer callback. Caller must hold mMutex (per the
 * function-header convention of the *WithLock helpers in this file).
 *
 * Three delivery paths:
 *  1) frame_number not in mPendingRequestsList -> send the buffer to the
 *     framework immediately (buffer-only result, no metadata).
 *  2) pending request with an input buffer (reprocess) -> send shutter
 *     notify + full result now and erase the request.
 *  3) normal pending request -> cache a copy of the buffer in the request's
 *     buffer list; it is sent later together with the metadata.
 */
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A completed BLOB (JPEG) buffer means the snapshot is done; drop the
    // perf lock taken for it.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        // During flush, buffers take a dedicated path.
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was marked for drop, flag the buffer
        // as errored and retire the drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge any error status recorded against this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: notify shutter and return the result now.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Honor the input buffer's release fence before consuming it.
            if (i->input_buffer->release_fence != -1) {
                int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
                close(i->input_buffer->release_fence);
                if (rc != OK) {
                    LOGE("input buffer sync wait failed %d", rc);
                }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = PARTIAL_RESULT_COUNT;

            // Notify must precede the result for this frame.
            orchestrateNotify(&notify_msg);
            orchestrateResult(&result);
            LOGD("Notify reprocess now %d!", frame_number);
            i = erasePendingRequest(i);
        } else {
            // Normal request: cache a heap copy of the buffer against its
            // stream slot; it is returned when the metadata arrives.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                                sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                                buffer->buffer, frame_number);
                    }
                }
            }
        }
    }

    // First preview buffer out: release startup perf locks and switch to the
    // steady-state power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3579
3580/*===========================================================================
3581 * FUNCTION : unblockRequestIfNecessary
3582 *
3583 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3584 * that mMutex is held when this function is called.
3585 *
3586 * PARAMETERS :
3587 *
3588 * RETURN :
3589 *
3590 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Per the function header, mMutex is already held by the caller, so
    // signaling mRequestCond here is race-free; the waiter re-checks its
    // predicate (max-buffer condition) after waking.
    pthread_cond_signal(&mRequestCond);
}
3596
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003597/*===========================================================================
3598 * FUNCTION : isHdrSnapshotRequest
3599 *
3600 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3601 *
3602 * PARAMETERS : camera3 request structure
3603 *
3604 * RETURN : boolean decision variable
3605 *
3606 *==========================================================================*/
3607bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3608{
3609 if (request == NULL) {
3610 LOGE("Invalid request handle");
3611 assert(0);
3612 return false;
3613 }
3614
3615 if (!mForceHdrSnapshot) {
3616 CameraMetadata frame_settings;
3617 frame_settings = request->settings;
3618
3619 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3620 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3621 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3622 return false;
3623 }
3624 } else {
3625 return false;
3626 }
3627
3628 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3629 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3630 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3631 return false;
3632 }
3633 } else {
3634 return false;
3635 }
3636 }
3637
3638 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3639 if (request->output_buffers[i].stream->format
3640 == HAL_PIXEL_FORMAT_BLOB) {
3641 return true;
3642 }
3643 }
3644
3645 return false;
3646}
3647/*===========================================================================
3648 * FUNCTION : orchestrateRequest
3649 *
3650 * DESCRIPTION: Orchestrates a capture request from camera service
3651 *
3652 * PARAMETERS :
3653 * @request : request from framework to process
3654 *
3655 * RETURN : Error status codes
3656 *
3657 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Preserve the framework's view of the request so it can be restored
    // after the HDR expansion below mutates frame number, output count and
    // settings in place.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    // HDR snapshot (no reprocess input): expand the single framework request
    // into an internally-orchestrated bracketed sequence (-2x, 0x, +2x EV),
    // only the 0x capture being reported back under the framework's frame
    // number; all other captures get internal-only frame numbers.
    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        // NOTE(review): each release() below hands out a fresh
        // camera_metadata_t that is never freed once superseded — this looks
        // like a leak per HDR snapshot. It is NOT freed here because
        // processCaptureRequest appears to store the raw settings pointer in
        // the pending-request list; confirm ownership before fixing.
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // The 0x capture uses the original output buffers and is mapped back
        // to the framework's frame number in the orchestration DB.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Re-derive settings with 0 EV compensation (AE still locked).
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settling capture...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...then the actual 0x internal capture with metadata requested.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Settling (metering-only) capture at +2x...
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ...followed by the +2x capture with metadata.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Non-HDR path: just translate the framework frame number to an
        // internal one and forward the request unchanged.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3793
3794/*===========================================================================
3795 * FUNCTION : orchestrateResult
3796 *
3797 * DESCRIPTION: Orchestrates a capture result to camera service
3798 *
3799 * PARAMETERS :
 * @result : capture result (with internal frame number) to deliver
3801 *
3802 * RETURN :
3803 *
3804 *==========================================================================*/
3805void QCamera3HardwareInterface::orchestrateResult(
3806 camera3_capture_result_t *result)
3807{
3808 uint32_t frameworkFrameNumber;
3809 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3810 frameworkFrameNumber);
3811 if (rc != NO_ERROR) {
3812 LOGE("Cannot find translated frameworkFrameNumber");
3813 assert(0);
3814 } else {
3815 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3816 LOGD("CAM_DEBUG Internal Request drop the result");
3817 } else {
3818 result->frame_number = frameworkFrameNumber;
3819 mCallbackOps->process_capture_result(mCallbackOps, result);
3820 }
3821 }
3822}
3823
3824/*===========================================================================
3825 * FUNCTION : orchestrateNotify
3826 *
3827 * DESCRIPTION: Orchestrates a notify to camera service
3828 *
3829 * PARAMETERS :
 * @notify_msg : notify message (with internal frame number) to deliver
3831 *
3832 * RETURN :
3833 *
3834 *==========================================================================*/
3835void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3836{
3837 uint32_t frameworkFrameNumber;
3838 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3839 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3840 frameworkFrameNumber);
3841 if (rc != NO_ERROR) {
3842 LOGE("Cannot find translated frameworkFrameNumber");
3843 assert(0);
3844 } else {
3845 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3846 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3847 } else {
3848 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3849 mCallbackOps->notify(mCallbackOps, notify_msg);
3850 }
3851 }
3852}
3853
3854/*===========================================================================
3855 * FUNCTION : FrameNumberRegistry
3856 *
3857 * DESCRIPTION: Constructor
3858 *
3859 * PARAMETERS :
3860 *
3861 * RETURN :
3862 *
3863 *==========================================================================*/
3864FrameNumberRegistry::FrameNumberRegistry()
3865{
3866 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3867}
3868
3869/*===========================================================================
3870 * FUNCTION : ~FrameNumberRegistry
3871 *
3872 * DESCRIPTION: Destructor
3873 *
3874 * PARAMETERS :
3875 *
3876 * RETURN :
3877 *
3878 *==========================================================================*/
3879FrameNumberRegistry::~FrameNumberRegistry()
3880{
3881}
3882
3883/*===========================================================================
3884 * FUNCTION : PurgeOldEntriesLocked
3885 *
 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
3887 *
3888 * PARAMETERS :
3889 *
3890 * RETURN : NONE
3891 *
3892 *==========================================================================*/
3893void FrameNumberRegistry::purgeOldEntriesLocked()
3894{
3895 while (_register.begin() != _register.end()) {
3896 auto itr = _register.begin();
3897 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3898 _register.erase(itr);
3899 } else {
3900 return;
3901 }
3902 }
3903}
3904
3905/*===========================================================================
3906 * FUNCTION : allocStoreInternalFrameNumber
3907 *
3908 * DESCRIPTION: Method to note down a framework request and associate a new
3909 * internal request number against it
3910 *
3911 * PARAMETERS :
3912 * @fFrameNumber: Identifier given by framework
3913 * @internalFN : Output parameter which will have the newly generated internal
3914 * entry
3915 *
3916 * RETURN : Error code
3917 *
3918 *==========================================================================*/
3919int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
3920 uint32_t &internalFrameNumber)
3921{
3922 Mutex::Autolock lock(mRegistryLock);
3923 internalFrameNumber = _nextFreeInternalNumber++;
3924 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
3925 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
3926 purgeOldEntriesLocked();
3927 return NO_ERROR;
3928}
3929
3930/*===========================================================================
3931 * FUNCTION : generateStoreInternalFrameNumber
3932 *
 * DESCRIPTION: Method to associate a new internal request number independent
 *              of any association with framework requests
3935 *
3936 * PARAMETERS :
 * @internalFrame#: Output parameter which will have the newly generated internal frame number
3938 *
3939 *
3940 * RETURN : Error code
3941 *
3942 *==========================================================================*/
3943int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
3944{
3945 Mutex::Autolock lock(mRegistryLock);
3946 internalFrameNumber = _nextFreeInternalNumber++;
3947 LOGD("Generated internal framenumber:%d", internalFrameNumber);
3948 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
3949 purgeOldEntriesLocked();
3950 return NO_ERROR;
3951}
3952
3953/*===========================================================================
3954 * FUNCTION : getFrameworkFrameNumber
3955 *
3956 * DESCRIPTION: Method to query the framework framenumber given an internal #
3957 *
3958 * PARAMETERS :
3959 * @internalFrame#: Internal reference
3960 * @frameworkframenumber: Output parameter holding framework frame entry
3961 *
3962 * RETURN : Error code
3963 *
3964 *==========================================================================*/
3965int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
3966 uint32_t &frameworkFrameNumber)
3967{
3968 Mutex::Autolock lock(mRegistryLock);
3969 auto itr = _register.find(internalFrameNumber);
3970 if (itr == _register.end()) {
3971 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
3972 return -ENOENT;
3973 }
3974
3975 frameworkFrameNumber = itr->second;
3976 purgeOldEntriesLocked();
3977 return NO_ERROR;
3978}
Thierry Strudel3d639192016-09-09 11:52:26 -07003979
3980/*===========================================================================
3981 * FUNCTION : processCaptureRequest
3982 *
3983 * DESCRIPTION: process a capture request from camera service
3984 *
3985 * PARAMETERS :
3986 * @request : request from framework to process
3987 *
3988 * RETURN :
3989 *
3990 *==========================================================================*/
3991int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003992 camera3_capture_request_t *request,
3993 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003994{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003995 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07003996 int rc = NO_ERROR;
3997 int32_t request_id;
3998 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07003999 bool isVidBufRequested = false;
4000 camera3_stream_buffer_t *pInputBuffer = NULL;
4001
4002 pthread_mutex_lock(&mMutex);
4003
4004 // Validate current state
4005 switch (mState) {
4006 case CONFIGURED:
4007 case STARTED:
4008 /* valid state */
4009 break;
4010
4011 case ERROR:
4012 pthread_mutex_unlock(&mMutex);
4013 handleCameraDeviceError();
4014 return -ENODEV;
4015
4016 default:
4017 LOGE("Invalid state %d", mState);
4018 pthread_mutex_unlock(&mMutex);
4019 return -ENODEV;
4020 }
4021
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004022 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004023 if (rc != NO_ERROR) {
4024 LOGE("incoming request is not valid");
4025 pthread_mutex_unlock(&mMutex);
4026 return rc;
4027 }
4028
4029 meta = request->settings;
4030
4031 // For first capture request, send capture intent, and
4032 // stream on all streams
4033 if (mState == CONFIGURED) {
4034 // send an unconfigure to the backend so that the isp
4035 // resources are deallocated
4036 if (!mFirstConfiguration) {
4037 cam_stream_size_info_t stream_config_info;
4038 int32_t hal_version = CAM_HAL_V3;
4039 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4040 stream_config_info.buffer_info.min_buffers =
4041 MIN_INFLIGHT_REQUESTS;
4042 stream_config_info.buffer_info.max_buffers =
4043 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4044 clear_metadata_buffer(mParameters);
4045 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4046 CAM_INTF_PARM_HAL_VERSION, hal_version);
4047 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4048 CAM_INTF_META_STREAM_INFO, stream_config_info);
4049 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4050 mParameters);
4051 if (rc < 0) {
4052 LOGE("set_parms for unconfigure failed");
4053 pthread_mutex_unlock(&mMutex);
4054 return rc;
4055 }
4056 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004057 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004058 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004059 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004060 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004061 property_get("persist.camera.is_type", is_type_value, "4");
4062 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4063 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4064 property_get("persist.camera.is_type_preview", is_type_value, "4");
4065 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4066 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004067
4068 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4069 int32_t hal_version = CAM_HAL_V3;
4070 uint8_t captureIntent =
4071 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4072 mCaptureIntent = captureIntent;
4073 clear_metadata_buffer(mParameters);
4074 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4075 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4076 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004077 if (mFirstConfiguration) {
4078 // configure instant AEC
4079 // Instant AEC is a session based parameter and it is needed only
4080 // once per complete session after open camera.
4081 // i.e. This is set only once for the first capture request, after open camera.
4082 setInstantAEC(meta);
4083 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004084 uint8_t fwkVideoStabMode=0;
4085 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4086 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4087 }
4088
4089 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4090 // turn it on for video/preview
4091 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4092 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004093 int32_t vsMode;
4094 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4096 rc = BAD_VALUE;
4097 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004098 LOGD("setEis %d", setEis);
4099 bool eis3Supported = false;
4100 size_t count = IS_TYPE_MAX;
4101 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4102 for (size_t i = 0; i < count; i++) {
4103 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4104 eis3Supported = true;
4105 break;
4106 }
4107 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004108
4109 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004110 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4112 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004113 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4114 is_type = isTypePreview;
4115 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4116 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4117 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004118 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004119 } else {
4120 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004121 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004122 } else {
4123 is_type = IS_TYPE_NONE;
4124 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004126 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004127 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4128 }
4129 }
4130
4131 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4132 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4133
4134 int32_t tintless_value = 1;
4135 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4136 CAM_INTF_PARM_TINTLESS, tintless_value);
4137 //Disable CDS for HFR mode or if DIS/EIS is on.
4138 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4139 //after every configure_stream
4140 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4141 (m_bIsVideo)) {
4142 int32_t cds = CAM_CDS_MODE_OFF;
4143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4144 CAM_INTF_PARM_CDS_MODE, cds))
4145 LOGE("Failed to disable CDS for HFR mode");
4146
4147 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004148
4149 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4150 uint8_t* use_av_timer = NULL;
4151
4152 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004153 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004154 use_av_timer = &m_debug_avtimer;
4155 }
4156 else{
4157 use_av_timer =
4158 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004159 if (use_av_timer) {
4160 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4161 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004162 }
4163
4164 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4165 rc = BAD_VALUE;
4166 }
4167 }
4168
Thierry Strudel3d639192016-09-09 11:52:26 -07004169 setMobicat();
4170
4171 /* Set fps and hfr mode while sending meta stream info so that sensor
4172 * can configure appropriate streaming mode */
4173 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004174 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4175 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004176 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4177 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004178 if (rc == NO_ERROR) {
4179 int32_t max_fps =
4180 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004181 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004182 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4183 }
4184 /* For HFR, more buffers are dequeued upfront to improve the performance */
4185 if (mBatchSize) {
4186 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4187 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4188 }
4189 }
4190 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004191 LOGE("setHalFpsRange failed");
4192 }
4193 }
4194 if (meta.exists(ANDROID_CONTROL_MODE)) {
4195 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4196 rc = extractSceneMode(meta, metaMode, mParameters);
4197 if (rc != NO_ERROR) {
4198 LOGE("extractSceneMode failed");
4199 }
4200 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004201 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004202
Thierry Strudel04e026f2016-10-10 11:27:36 -07004203 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4204 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4205 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4206 rc = setVideoHdrMode(mParameters, vhdr);
4207 if (rc != NO_ERROR) {
4208 LOGE("setVideoHDR is failed");
4209 }
4210 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004211
Thierry Strudel3d639192016-09-09 11:52:26 -07004212 //TODO: validate the arguments, HSV scenemode should have only the
4213 //advertised fps ranges
4214
4215 /*set the capture intent, hal version, tintless, stream info,
4216 *and disenable parameters to the backend*/
4217 LOGD("set_parms META_STREAM_INFO " );
4218 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4219 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004220 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004221 mStreamConfigInfo.type[i],
4222 mStreamConfigInfo.stream_sizes[i].width,
4223 mStreamConfigInfo.stream_sizes[i].height,
4224 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004225 mStreamConfigInfo.format[i],
4226 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004227 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004228
Thierry Strudel3d639192016-09-09 11:52:26 -07004229 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4230 mParameters);
4231 if (rc < 0) {
4232 LOGE("set_parms failed for hal version, stream info");
4233 }
4234
4235 cam_dimension_t sensor_dim;
4236 memset(&sensor_dim, 0, sizeof(sensor_dim));
4237 rc = getSensorOutputSize(sensor_dim);
4238 if (rc != NO_ERROR) {
4239 LOGE("Failed to get sensor output size");
4240 pthread_mutex_unlock(&mMutex);
4241 goto error_exit;
4242 }
4243
4244 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4245 gCamCapability[mCameraId]->active_array_size.height,
4246 sensor_dim.width, sensor_dim.height);
4247
4248 /* Set batchmode before initializing channel. Since registerBuffer
4249 * internally initializes some of the channels, better set batchmode
4250 * even before first register buffer */
4251 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4252 it != mStreamInfo.end(); it++) {
4253 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4254 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4255 && mBatchSize) {
4256 rc = channel->setBatchSize(mBatchSize);
4257 //Disable per frame map unmap for HFR/batchmode case
4258 rc |= channel->setPerFrameMapUnmap(false);
4259 if (NO_ERROR != rc) {
4260 LOGE("Channel init failed %d", rc);
4261 pthread_mutex_unlock(&mMutex);
4262 goto error_exit;
4263 }
4264 }
4265 }
4266
4267 //First initialize all streams
4268 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4269 it != mStreamInfo.end(); it++) {
4270 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4271 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4272 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004273 setEis) {
4274 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4275 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4276 is_type = mStreamConfigInfo.is_type[i];
4277 break;
4278 }
4279 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004280 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004281 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004282 rc = channel->initialize(IS_TYPE_NONE);
4283 }
4284 if (NO_ERROR != rc) {
4285 LOGE("Channel initialization failed %d", rc);
4286 pthread_mutex_unlock(&mMutex);
4287 goto error_exit;
4288 }
4289 }
4290
4291 if (mRawDumpChannel) {
4292 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4293 if (rc != NO_ERROR) {
4294 LOGE("Error: Raw Dump Channel init failed");
4295 pthread_mutex_unlock(&mMutex);
4296 goto error_exit;
4297 }
4298 }
4299 if (mSupportChannel) {
4300 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4301 if (rc < 0) {
4302 LOGE("Support channel initialization failed");
4303 pthread_mutex_unlock(&mMutex);
4304 goto error_exit;
4305 }
4306 }
4307 if (mAnalysisChannel) {
4308 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4309 if (rc < 0) {
4310 LOGE("Analysis channel initialization failed");
4311 pthread_mutex_unlock(&mMutex);
4312 goto error_exit;
4313 }
4314 }
4315 if (mDummyBatchChannel) {
4316 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4317 if (rc < 0) {
4318 LOGE("mDummyBatchChannel setBatchSize failed");
4319 pthread_mutex_unlock(&mMutex);
4320 goto error_exit;
4321 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004322 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004323 if (rc < 0) {
4324 LOGE("mDummyBatchChannel initialization failed");
4325 pthread_mutex_unlock(&mMutex);
4326 goto error_exit;
4327 }
4328 }
4329
4330 // Set bundle info
4331 rc = setBundleInfo();
4332 if (rc < 0) {
4333 LOGE("setBundleInfo failed %d", rc);
4334 pthread_mutex_unlock(&mMutex);
4335 goto error_exit;
4336 }
4337
4338 //update settings from app here
4339 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4340 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4341 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4342 }
4343 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4344 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4345 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4346 }
4347 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4348 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4349 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4350
4351 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4352 (mLinkedCameraId != mCameraId) ) {
4353 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4354 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004355 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004356 goto error_exit;
4357 }
4358 }
4359
4360 // add bundle related cameras
4361 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4362 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004363 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4364 &m_pDualCamCmdPtr->bundle_info;
4365 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004366 if (mIsDeviceLinked)
4367 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4368 else
4369 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4370
4371 pthread_mutex_lock(&gCamLock);
4372
4373 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4374 LOGE("Dualcam: Invalid Session Id ");
4375 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004376 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004377 goto error_exit;
4378 }
4379
4380 if (mIsMainCamera == 1) {
4381 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4382 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004383 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004384 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004385 // related session id should be session id of linked session
4386 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4387 } else {
4388 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4389 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004390 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004391 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004392 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4393 }
4394 pthread_mutex_unlock(&gCamLock);
4395
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004396 rc = mCameraHandle->ops->set_dual_cam_cmd(
4397 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004398 if (rc < 0) {
4399 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004400 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004401 goto error_exit;
4402 }
4403 }
4404
4405 //Then start them.
4406 LOGH("Start META Channel");
4407 rc = mMetadataChannel->start();
4408 if (rc < 0) {
4409 LOGE("META channel start failed");
4410 pthread_mutex_unlock(&mMutex);
4411 goto error_exit;
4412 }
4413
4414 if (mAnalysisChannel) {
4415 rc = mAnalysisChannel->start();
4416 if (rc < 0) {
4417 LOGE("Analysis channel start failed");
4418 mMetadataChannel->stop();
4419 pthread_mutex_unlock(&mMutex);
4420 goto error_exit;
4421 }
4422 }
4423
4424 if (mSupportChannel) {
4425 rc = mSupportChannel->start();
4426 if (rc < 0) {
4427 LOGE("Support channel start failed");
4428 mMetadataChannel->stop();
4429 /* Although support and analysis are mutually exclusive today
4430 adding it in anycase for future proofing */
4431 if (mAnalysisChannel) {
4432 mAnalysisChannel->stop();
4433 }
4434 pthread_mutex_unlock(&mMutex);
4435 goto error_exit;
4436 }
4437 }
4438 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4439 it != mStreamInfo.end(); it++) {
4440 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4441 LOGH("Start Processing Channel mask=%d",
4442 channel->getStreamTypeMask());
4443 rc = channel->start();
4444 if (rc < 0) {
4445 LOGE("channel start failed");
4446 pthread_mutex_unlock(&mMutex);
4447 goto error_exit;
4448 }
4449 }
4450
4451 if (mRawDumpChannel) {
4452 LOGD("Starting raw dump stream");
4453 rc = mRawDumpChannel->start();
4454 if (rc != NO_ERROR) {
4455 LOGE("Error Starting Raw Dump Channel");
4456 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4457 it != mStreamInfo.end(); it++) {
4458 QCamera3Channel *channel =
4459 (QCamera3Channel *)(*it)->stream->priv;
4460 LOGH("Stopping Processing Channel mask=%d",
4461 channel->getStreamTypeMask());
4462 channel->stop();
4463 }
4464 if (mSupportChannel)
4465 mSupportChannel->stop();
4466 if (mAnalysisChannel) {
4467 mAnalysisChannel->stop();
4468 }
4469 mMetadataChannel->stop();
4470 pthread_mutex_unlock(&mMutex);
4471 goto error_exit;
4472 }
4473 }
4474
4475 if (mChannelHandle) {
4476
4477 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4478 mChannelHandle);
4479 if (rc != NO_ERROR) {
4480 LOGE("start_channel failed %d", rc);
4481 pthread_mutex_unlock(&mMutex);
4482 goto error_exit;
4483 }
4484 }
4485
4486 goto no_error;
4487error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004488 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004489 return rc;
4490no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004491 mWokenUpByDaemon = false;
4492 mPendingLiveRequest = 0;
4493 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004494 }
4495
4496 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004497 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004498
4499 if (mFlushPerf) {
4500 //we cannot accept any requests during flush
4501 LOGE("process_capture_request cannot proceed during flush");
4502 pthread_mutex_unlock(&mMutex);
4503 return NO_ERROR; //should return an error
4504 }
4505
4506 if (meta.exists(ANDROID_REQUEST_ID)) {
4507 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4508 mCurrentRequestId = request_id;
4509 LOGD("Received request with id: %d", request_id);
4510 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4511 LOGE("Unable to find request id field, \
4512 & no previous id available");
4513 pthread_mutex_unlock(&mMutex);
4514 return NAME_NOT_FOUND;
4515 } else {
4516 LOGD("Re-using old request id");
4517 request_id = mCurrentRequestId;
4518 }
4519
4520 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4521 request->num_output_buffers,
4522 request->input_buffer,
4523 frameNumber);
4524 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004525 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004526 int blob_request = 0;
4527 uint32_t snapshotStreamId = 0;
4528 for (size_t i = 0; i < request->num_output_buffers; i++) {
4529 const camera3_stream_buffer_t& output = request->output_buffers[i];
4530 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4531
4532 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004533 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004534 blob_request = 1;
4535 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4536 }
4537
4538 if (output.acquire_fence != -1) {
4539 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4540 close(output.acquire_fence);
4541 if (rc != OK) {
4542 LOGE("sync wait failed %d", rc);
4543 pthread_mutex_unlock(&mMutex);
4544 return rc;
4545 }
4546 }
4547
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004548 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004549 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004550
4551 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4552 isVidBufRequested = true;
4553 }
4554 }
4555
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004556 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4557 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4558 itr++) {
4559 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4560 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4561 channel->getStreamID(channel->getStreamTypeMask());
4562
4563 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4564 isVidBufRequested = true;
4565 }
4566 }
4567
Thierry Strudel3d639192016-09-09 11:52:26 -07004568 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004569 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004570 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004571 }
4572 if (blob_request && mRawDumpChannel) {
4573 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004574 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004575 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004576 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004577 }
4578
4579 if(request->input_buffer == NULL) {
4580 /* Parse the settings:
4581 * - For every request in NORMAL MODE
4582 * - For every request in HFR mode during preview only case
4583 * - For first request of every batch in HFR mode during video
4584 * recording. In batchmode the same settings except frame number is
4585 * repeated in each request of the batch.
4586 */
4587 if (!mBatchSize ||
4588 (mBatchSize && !isVidBufRequested) ||
4589 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004590 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004591 if (rc < 0) {
4592 LOGE("fail to set frame parameters");
4593 pthread_mutex_unlock(&mMutex);
4594 return rc;
4595 }
4596 }
4597 /* For batchMode HFR, setFrameParameters is not called for every
4598 * request. But only frame number of the latest request is parsed.
4599 * Keep track of first and last frame numbers in a batch so that
4600 * metadata for the frame numbers of batch can be duplicated in
4601 * handleBatchMetadta */
4602 if (mBatchSize) {
4603 if (!mToBeQueuedVidBufs) {
4604 //start of the batch
4605 mFirstFrameNumberInBatch = request->frame_number;
4606 }
4607 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4608 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4609 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004610 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004611 return BAD_VALUE;
4612 }
4613 }
4614 if (mNeedSensorRestart) {
4615 /* Unlock the mutex as restartSensor waits on the channels to be
4616 * stopped, which in turn calls stream callback functions -
4617 * handleBufferWithLock and handleMetadataWithLock */
4618 pthread_mutex_unlock(&mMutex);
4619 rc = dynamicUpdateMetaStreamInfo();
4620 if (rc != NO_ERROR) {
4621 LOGE("Restarting the sensor failed");
4622 return BAD_VALUE;
4623 }
4624 mNeedSensorRestart = false;
4625 pthread_mutex_lock(&mMutex);
4626 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004627 if(mResetInstantAEC) {
4628 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4629 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4630 mResetInstantAEC = false;
4631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004632 } else {
4633
4634 if (request->input_buffer->acquire_fence != -1) {
4635 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4636 close(request->input_buffer->acquire_fence);
4637 if (rc != OK) {
4638 LOGE("input buffer sync wait failed %d", rc);
4639 pthread_mutex_unlock(&mMutex);
4640 return rc;
4641 }
4642 }
4643 }
4644
4645 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4646 mLastCustIntentFrmNum = frameNumber;
4647 }
4648 /* Update pending request list and pending buffers map */
4649 PendingRequestInfo pendingRequest;
4650 pendingRequestIterator latestRequest;
4651 pendingRequest.frame_number = frameNumber;
4652 pendingRequest.num_buffers = request->num_output_buffers;
4653 pendingRequest.request_id = request_id;
4654 pendingRequest.blob_request = blob_request;
4655 pendingRequest.timestamp = 0;
4656 pendingRequest.bUrgentReceived = 0;
4657 if (request->input_buffer) {
4658 pendingRequest.input_buffer =
4659 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4660 *(pendingRequest.input_buffer) = *(request->input_buffer);
4661 pInputBuffer = pendingRequest.input_buffer;
4662 } else {
4663 pendingRequest.input_buffer = NULL;
4664 pInputBuffer = NULL;
4665 }
4666
4667 pendingRequest.pipeline_depth = 0;
4668 pendingRequest.partial_result_cnt = 0;
4669 extractJpegMetadata(mCurJpegMeta, request);
4670 pendingRequest.jpegMetadata = mCurJpegMeta;
4671 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4672 pendingRequest.shutter_notified = false;
4673
4674 //extract capture intent
4675 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4676 mCaptureIntent =
4677 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4678 }
4679 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004680 /* DevCamDebug metadata processCaptureRequest */
4681 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4682 mDevCamDebugMetaEnable =
4683 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4684 }
4685 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4686 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004687
4688 //extract CAC info
4689 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4690 mCacMode =
4691 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4692 }
4693 pendingRequest.fwkCacMode = mCacMode;
4694
4695 PendingBuffersInRequest bufsForCurRequest;
4696 bufsForCurRequest.frame_number = frameNumber;
4697 // Mark current timestamp for the new request
4698 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4699
4700 for (size_t i = 0; i < request->num_output_buffers; i++) {
4701 RequestedBufferInfo requestedBuf;
4702 memset(&requestedBuf, 0, sizeof(requestedBuf));
4703 requestedBuf.stream = request->output_buffers[i].stream;
4704 requestedBuf.buffer = NULL;
4705 pendingRequest.buffers.push_back(requestedBuf);
4706
4707 // Add to buffer handle the pending buffers list
4708 PendingBufferInfo bufferInfo;
4709 bufferInfo.buffer = request->output_buffers[i].buffer;
4710 bufferInfo.stream = request->output_buffers[i].stream;
4711 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4712 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4713 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4714 frameNumber, bufferInfo.buffer,
4715 channel->getStreamTypeMask(), bufferInfo.stream->format);
4716 }
4717 // Add this request packet into mPendingBuffersMap
4718 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4719 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4720 mPendingBuffersMap.get_num_overall_buffers());
4721
4722 latestRequest = mPendingRequestsList.insert(
4723 mPendingRequestsList.end(), pendingRequest);
4724 if(mFlush) {
4725 LOGI("mFlush is true");
4726 pthread_mutex_unlock(&mMutex);
4727 return NO_ERROR;
4728 }
4729
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004730 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004731 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004732 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004733
4734 if(request->input_buffer != NULL){
4735 LOGD("Input request, frame_number %d", frameNumber);
4736 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4737 if (NO_ERROR != rc) {
4738 LOGE("fail to set reproc parameters");
4739 pthread_mutex_unlock(&mMutex);
4740 return rc;
4741 }
4742 }
4743
4744 // Call request on other streams
4745 uint32_t streams_need_metadata = 0;
4746 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4747 for (size_t i = 0; i < request->num_output_buffers; i++) {
4748 const camera3_stream_buffer_t& output = request->output_buffers[i];
4749 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4750
4751 if (channel == NULL) {
4752 LOGW("invalid channel pointer for stream");
4753 continue;
4754 }
4755
4756 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4757 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4758 output.buffer, request->input_buffer, frameNumber);
4759 if(request->input_buffer != NULL){
4760 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004761 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004762 if (rc < 0) {
4763 LOGE("Fail to request on picture channel");
4764 pthread_mutex_unlock(&mMutex);
4765 return rc;
4766 }
4767 } else {
4768 LOGD("snapshot request with buffer %p, frame_number %d",
4769 output.buffer, frameNumber);
4770 if (!request->settings) {
4771 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004772 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 } else {
4774 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004775 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004776 }
4777 if (rc < 0) {
4778 LOGE("Fail to request on picture channel");
4779 pthread_mutex_unlock(&mMutex);
4780 return rc;
4781 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004782
4783 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4784 uint32_t j = 0;
4785 for (j = 0; j < streamsArray.num_streams; j++) {
4786 if (streamsArray.stream_request[j].streamID == streamId) {
4787 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4788 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4789 else
4790 streamsArray.stream_request[j].buf_index = indexUsed;
4791 break;
4792 }
4793 }
4794 if (j == streamsArray.num_streams) {
4795 LOGE("Did not find matching stream to update index");
4796 assert(0);
4797 }
4798
Thierry Strudel3d639192016-09-09 11:52:26 -07004799 pendingBufferIter->need_metadata = true;
4800 streams_need_metadata++;
4801 }
4802 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4803 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004804 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4805 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004806 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
4807 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004808 if (rc < 0) {
4809 LOGE("Fail to request on YUV channel");
4810 pthread_mutex_unlock(&mMutex);
4811 return rc;
4812 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004813
4814 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4815 uint32_t j = 0;
4816 for (j = 0; j < streamsArray.num_streams; j++) {
4817 if (streamsArray.stream_request[j].streamID == streamId) {
4818 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4819 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4820 else
4821 streamsArray.stream_request[j].buf_index = indexUsed;
4822 break;
4823 }
4824 }
4825 if (j == streamsArray.num_streams) {
4826 LOGE("Did not find matching stream to update index");
4827 assert(0);
4828 }
4829
Thierry Strudel3d639192016-09-09 11:52:26 -07004830 pendingBufferIter->need_metadata = needMetadata;
4831 if (needMetadata)
4832 streams_need_metadata += 1;
4833 LOGD("calling YUV channel request, need_metadata is %d",
4834 needMetadata);
4835 } else {
4836 LOGD("request with buffer %p, frame_number %d",
4837 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004838
4839 rc = channel->request(output.buffer, frameNumber, indexUsed);
4840
4841 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4842 uint32_t j = 0;
4843 for (j = 0; j < streamsArray.num_streams; j++) {
4844 if (streamsArray.stream_request[j].streamID == streamId) {
4845 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4846 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4847 else
4848 streamsArray.stream_request[j].buf_index = indexUsed;
4849 break;
4850 }
4851 }
4852 if (j == streamsArray.num_streams) {
4853 LOGE("Did not find matching stream to update index");
4854 assert(0);
4855 }
4856
Thierry Strudel3d639192016-09-09 11:52:26 -07004857 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4858 && mBatchSize) {
4859 mToBeQueuedVidBufs++;
4860 if (mToBeQueuedVidBufs == mBatchSize) {
4861 channel->queueBatchBuf();
4862 }
4863 }
4864 if (rc < 0) {
4865 LOGE("request failed");
4866 pthread_mutex_unlock(&mMutex);
4867 return rc;
4868 }
4869 }
4870 pendingBufferIter++;
4871 }
4872
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004873 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4874 itr++) {
4875 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4876
4877 if (channel == NULL) {
4878 LOGE("invalid channel pointer for stream");
4879 assert(0);
4880 return BAD_VALUE;
4881 }
4882
4883 InternalRequest requestedStream;
4884 requestedStream = (*itr);
4885
4886
4887 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
4888 LOGD("snapshot request internally input buffer %p, frame_number %d",
4889 request->input_buffer, frameNumber);
4890 if(request->input_buffer != NULL){
4891 rc = channel->request(NULL, frameNumber,
4892 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
4893 if (rc < 0) {
4894 LOGE("Fail to request on picture channel");
4895 pthread_mutex_unlock(&mMutex);
4896 return rc;
4897 }
4898 } else {
4899 LOGD("snapshot request with frame_number %d", frameNumber);
4900 if (!request->settings) {
4901 rc = channel->request(NULL, frameNumber,
4902 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
4903 } else {
4904 rc = channel->request(NULL, frameNumber,
4905 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
4906 }
4907 if (rc < 0) {
4908 LOGE("Fail to request on picture channel");
4909 pthread_mutex_unlock(&mMutex);
4910 return rc;
4911 }
4912
4913 if ((*itr).meteringOnly != 1) {
4914 requestedStream.need_metadata = 1;
4915 streams_need_metadata++;
4916 }
4917 }
4918
4919 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4920 uint32_t j = 0;
4921 for (j = 0; j < streamsArray.num_streams; j++) {
4922 if (streamsArray.stream_request[j].streamID == streamId) {
4923 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4924 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4925 else
4926 streamsArray.stream_request[j].buf_index = indexUsed;
4927 break;
4928 }
4929 }
4930 if (j == streamsArray.num_streams) {
4931 LOGE("Did not find matching stream to update index");
4932 assert(0);
4933 }
4934
4935 } else {
4936 LOGE("Internal requests not supported on this stream type");
4937 assert(0);
4938 return INVALID_OPERATION;
4939 }
4940 latestRequest->internalRequestList.push_back(requestedStream);
4941 }
4942
Thierry Strudel3d639192016-09-09 11:52:26 -07004943 //If 2 streams have need_metadata set to true, fail the request, unless
4944 //we copy/reference count the metadata buffer
4945 if (streams_need_metadata > 1) {
4946 LOGE("not supporting request in which two streams requires"
4947 " 2 HAL metadata for reprocessing");
4948 pthread_mutex_unlock(&mMutex);
4949 return -EINVAL;
4950 }
4951
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004952 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004953 /* Set the parameters to backend:
4954 * - For every request in NORMAL MODE
4955 * - For every request in HFR mode during preview only case
4956 * - Once every batch in HFR mode during video recording
4957 */
4958 if (!mBatchSize ||
4959 (mBatchSize && !isVidBufRequested) ||
4960 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4961 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4962 mBatchSize, isVidBufRequested,
4963 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004964
4965 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4966 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4967 uint32_t m = 0;
4968 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4969 if (streamsArray.stream_request[k].streamID ==
4970 mBatchedStreamsArray.stream_request[m].streamID)
4971 break;
4972 }
4973 if (m == mBatchedStreamsArray.num_streams) {
4974 mBatchedStreamsArray.stream_request\
4975 [mBatchedStreamsArray.num_streams].streamID =
4976 streamsArray.stream_request[k].streamID;
4977 mBatchedStreamsArray.stream_request\
4978 [mBatchedStreamsArray.num_streams].buf_index =
4979 streamsArray.stream_request[k].buf_index;
4980 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4981 }
4982 }
4983 streamsArray = mBatchedStreamsArray;
4984 }
4985 /* Update stream id of all the requested buffers */
4986 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4987 LOGE("Failed to set stream type mask in the parameters");
4988 return BAD_VALUE;
4989 }
4990
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4992 mParameters);
4993 if (rc < 0) {
4994 LOGE("set_parms failed");
4995 }
4996 /* reset to zero coz, the batch is queued */
4997 mToBeQueuedVidBufs = 0;
4998 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004999 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5000 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
5001 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5002 uint32_t m = 0;
5003 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5004 if (streamsArray.stream_request[k].streamID ==
5005 mBatchedStreamsArray.stream_request[m].streamID)
5006 break;
5007 }
5008 if (m == mBatchedStreamsArray.num_streams) {
5009 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
5010 streamsArray.stream_request[k].streamID;
5011 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
5012 streamsArray.stream_request[k].buf_index;
5013 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5014 }
5015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005016 }
5017 mPendingLiveRequest++;
5018 }
5019
5020 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5021
5022 mState = STARTED;
5023 // Added a timed condition wait
5024 struct timespec ts;
5025 uint8_t isValidTimeout = 1;
5026 rc = clock_gettime(CLOCK_REALTIME, &ts);
5027 if (rc < 0) {
5028 isValidTimeout = 0;
5029 LOGE("Error reading the real time clock!!");
5030 }
5031 else {
5032 // Make timeout as 5 sec for request to be honored
5033 ts.tv_sec += 5;
5034 }
5035 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 (mState != ERROR) && (mState != DEINIT)) {
5038 if (!isValidTimeout) {
5039 LOGD("Blocking on conditional wait");
5040 pthread_cond_wait(&mRequestCond, &mMutex);
5041 }
5042 else {
5043 LOGD("Blocking on timed conditional wait");
5044 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5045 if (rc == ETIMEDOUT) {
5046 rc = -ENODEV;
5047 LOGE("Unblocked on timeout!!!!");
5048 break;
5049 }
5050 }
5051 LOGD("Unblocked");
5052 if (mWokenUpByDaemon) {
5053 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005054 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005055 break;
5056 }
5057 }
5058 pthread_mutex_unlock(&mMutex);
5059
5060 return rc;
5061}
5062
5063/*===========================================================================
5064 * FUNCTION : dump
5065 *
5066 * DESCRIPTION:
5067 *
5068 * PARAMETERS :
5069 *
5070 *
5071 * RETURN :
5072 *==========================================================================*/
5073void QCamera3HardwareInterface::dump(int fd)
5074{
5075 pthread_mutex_lock(&mMutex);
5076 dprintf(fd, "\n Camera HAL3 information Begin \n");
5077
5078 dprintf(fd, "\nNumber of pending requests: %zu \n",
5079 mPendingRequestsList.size());
5080 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5081 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5082 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5083 for(pendingRequestIterator i = mPendingRequestsList.begin();
5084 i != mPendingRequestsList.end(); i++) {
5085 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5086 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5087 i->input_buffer);
5088 }
5089 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5090 mPendingBuffersMap.get_num_overall_buffers());
5091 dprintf(fd, "-------+------------------\n");
5092 dprintf(fd, " Frame | Stream type mask \n");
5093 dprintf(fd, "-------+------------------\n");
5094 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5095 for(auto &j : req.mPendingBufferList) {
5096 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5097 dprintf(fd, " %5d | %11d \n",
5098 req.frame_number, channel->getStreamTypeMask());
5099 }
5100 }
5101 dprintf(fd, "-------+------------------\n");
5102
5103 dprintf(fd, "\nPending frame drop list: %zu\n",
5104 mPendingFrameDropList.size());
5105 dprintf(fd, "-------+-----------\n");
5106 dprintf(fd, " Frame | Stream ID \n");
5107 dprintf(fd, "-------+-----------\n");
5108 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5109 i != mPendingFrameDropList.end(); i++) {
5110 dprintf(fd, " %5d | %9d \n",
5111 i->frame_number, i->stream_ID);
5112 }
5113 dprintf(fd, "-------+-----------\n");
5114
5115 dprintf(fd, "\n Camera HAL3 information End \n");
5116
5117 /* use dumpsys media.camera as trigger to send update debug level event */
5118 mUpdateDebugLevel = true;
5119 pthread_mutex_unlock(&mMutex);
5120 return;
5121}
5122
5123/*===========================================================================
5124 * FUNCTION : flush
5125 *
5126 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5127 * conditionally restarts channels
5128 *
5129 * PARAMETERS :
5130 * @ restartChannels: re-start all channels
5131 *
5132 *
5133 * RETURN :
5134 * 0 on success
5135 * Error code on failure
5136 *==========================================================================*/
5137int QCamera3HardwareInterface::flush(bool restartChannels)
5138{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005139 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005140 int32_t rc = NO_ERROR;
5141
5142 LOGD("Unblocking Process Capture Request");
5143 pthread_mutex_lock(&mMutex);
5144 mFlush = true;
5145 pthread_mutex_unlock(&mMutex);
5146
5147 rc = stopAllChannels();
5148 // unlink of dualcam
5149 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005150 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5151 &m_pDualCamCmdPtr->bundle_info;
5152 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005153 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5154 pthread_mutex_lock(&gCamLock);
5155
5156 if (mIsMainCamera == 1) {
5157 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5158 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005159 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005160 // related session id should be session id of linked session
5161 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5162 } else {
5163 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5164 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005165 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005166 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5167 }
5168 pthread_mutex_unlock(&gCamLock);
5169
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005170 rc = mCameraHandle->ops->set_dual_cam_cmd(
5171 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005172 if (rc < 0) {
5173 LOGE("Dualcam: Unlink failed, but still proceed to close");
5174 }
5175 }
5176
5177 if (rc < 0) {
5178 LOGE("stopAllChannels failed");
5179 return rc;
5180 }
5181 if (mChannelHandle) {
5182 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5183 mChannelHandle);
5184 }
5185
5186 // Reset bundle info
5187 rc = setBundleInfo();
5188 if (rc < 0) {
5189 LOGE("setBundleInfo failed %d", rc);
5190 return rc;
5191 }
5192
5193 // Mutex Lock
5194 pthread_mutex_lock(&mMutex);
5195
5196 // Unblock process_capture_request
5197 mPendingLiveRequest = 0;
5198 pthread_cond_signal(&mRequestCond);
5199
5200 rc = notifyErrorForPendingRequests();
5201 if (rc < 0) {
5202 LOGE("notifyErrorForPendingRequests failed");
5203 pthread_mutex_unlock(&mMutex);
5204 return rc;
5205 }
5206
5207 mFlush = false;
5208
5209 // Start the Streams/Channels
5210 if (restartChannels) {
5211 rc = startAllChannels();
5212 if (rc < 0) {
5213 LOGE("startAllChannels failed");
5214 pthread_mutex_unlock(&mMutex);
5215 return rc;
5216 }
5217 }
5218
5219 if (mChannelHandle) {
5220 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5221 mChannelHandle);
5222 if (rc < 0) {
5223 LOGE("start_channel failed");
5224 pthread_mutex_unlock(&mMutex);
5225 return rc;
5226 }
5227 }
5228
5229 pthread_mutex_unlock(&mMutex);
5230
5231 return 0;
5232}
5233
5234/*===========================================================================
5235 * FUNCTION : flushPerf
5236 *
5237 * DESCRIPTION: This is the performance optimization version of flush that does
5238 * not use stream off, rather flushes the system
5239 *
5240 * PARAMETERS :
5241 *
5242 *
5243 * RETURN : 0 : success
5244 * -EINVAL: input is malformed (device is not valid)
5245 * -ENODEV: if the device has encountered a serious error
5246 *==========================================================================*/
5247int QCamera3HardwareInterface::flushPerf()
5248{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005249 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 int32_t rc = 0;
5251 struct timespec timeout;
5252 bool timed_wait = false;
5253
5254 pthread_mutex_lock(&mMutex);
5255 mFlushPerf = true;
5256 mPendingBuffersMap.numPendingBufsAtFlush =
5257 mPendingBuffersMap.get_num_overall_buffers();
5258 LOGD("Calling flush. Wait for %d buffers to return",
5259 mPendingBuffersMap.numPendingBufsAtFlush);
5260
5261 /* send the flush event to the backend */
5262 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5263 if (rc < 0) {
5264 LOGE("Error in flush: IOCTL failure");
5265 mFlushPerf = false;
5266 pthread_mutex_unlock(&mMutex);
5267 return -ENODEV;
5268 }
5269
5270 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5271 LOGD("No pending buffers in HAL, return flush");
5272 mFlushPerf = false;
5273 pthread_mutex_unlock(&mMutex);
5274 return rc;
5275 }
5276
5277 /* wait on a signal that buffers were received */
5278 rc = clock_gettime(CLOCK_REALTIME, &timeout);
5279 if (rc < 0) {
5280 LOGE("Error reading the real time clock, cannot use timed wait");
5281 } else {
5282 timeout.tv_sec += FLUSH_TIMEOUT;
5283 timed_wait = true;
5284 }
5285
5286 //Block on conditional variable
5287 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5288 LOGD("Waiting on mBuffersCond");
5289 if (!timed_wait) {
5290 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5291 if (rc != 0) {
5292 LOGE("pthread_cond_wait failed due to rc = %s",
5293 strerror(rc));
5294 break;
5295 }
5296 } else {
5297 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5298 if (rc != 0) {
5299 LOGE("pthread_cond_timedwait failed due to rc = %s",
5300 strerror(rc));
5301 break;
5302 }
5303 }
5304 }
5305 if (rc != 0) {
5306 mFlushPerf = false;
5307 pthread_mutex_unlock(&mMutex);
5308 return -ENODEV;
5309 }
5310
5311 LOGD("Received buffers, now safe to return them");
5312
5313 //make sure the channels handle flush
5314 //currently only required for the picture channel to release snapshot resources
5315 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5316 it != mStreamInfo.end(); it++) {
5317 QCamera3Channel *channel = (*it)->channel;
5318 if (channel) {
5319 rc = channel->flush();
5320 if (rc) {
5321 LOGE("Flushing the channels failed with error %d", rc);
5322 // even though the channel flush failed we need to continue and
5323 // return the buffers we have to the framework, however the return
5324 // value will be an error
5325 rc = -ENODEV;
5326 }
5327 }
5328 }
5329
5330 /* notify the frameworks and send errored results */
5331 rc = notifyErrorForPendingRequests();
5332 if (rc < 0) {
5333 LOGE("notifyErrorForPendingRequests failed");
5334 pthread_mutex_unlock(&mMutex);
5335 return rc;
5336 }
5337
5338 //unblock process_capture_request
5339 mPendingLiveRequest = 0;
5340 unblockRequestIfNecessary();
5341
5342 mFlushPerf = false;
5343 pthread_mutex_unlock(&mMutex);
5344 LOGD ("Flush Operation complete. rc = %d", rc);
5345 return rc;
5346}
5347
/*===========================================================================
 * FUNCTION   : handleCameraDeviceError
 *
 * DESCRIPTION: This function calls internal flush and notifies the error to
 *              framework and updates the state variable. If the HAL is not
 *              in the ERROR state it returns immediately without side
 *              effects; otherwise it flushes (without restarting channels),
 *              moves the state to DEINIT and sends a CAMERA3_MSG_ERROR_DEVICE
 *              notification to the framework.
 *
 * PARAMETERS : None
 *
 * RETURN     : NO_ERROR on Success
 *              Error code on failure
 *==========================================================================*/
int32_t QCamera3HardwareInterface::handleCameraDeviceError()
{
    int32_t rc = NO_ERROR;

    {
        // NOTE(review): mFlushLock appears to serialize this error-recovery
        // flush with other flush() callers -- confirm against the other
        // mFlushLock users outside this chunk.
        Mutex::Autolock lock(mFlushLock);
        pthread_mutex_lock(&mMutex);
        if (mState != ERROR) {
            //if mState != ERROR, nothing to be done
            pthread_mutex_unlock(&mMutex);
            return NO_ERROR;
        }
        // mMutex must be released before calling flush(), which takes it
        // again internally.
        pthread_mutex_unlock(&mMutex);

        rc = flush(false /* restart channels */);
        if (NO_ERROR != rc) {
            LOGE("internal flush to handle mState = ERROR failed");
        }

        pthread_mutex_lock(&mMutex);
        mState = DEINIT;
        pthread_mutex_unlock(&mMutex);
    }

    // Report a device-level fatal error to the framework: no specific
    // stream, frame_number 0.
    camera3_notify_msg_t notify_msg;
    memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
    notify_msg.type = CAMERA3_MSG_ERROR;
    notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
    notify_msg.message.error.error_stream = NULL;
    notify_msg.message.error.frame_number = 0;
    orchestrateNotify(&notify_msg);

    return rc;
}
5393
5394/*===========================================================================
5395 * FUNCTION : captureResultCb
5396 *
5397 * DESCRIPTION: Callback handler for all capture result
5398 * (streams, as well as metadata)
5399 *
5400 * PARAMETERS :
5401 * @metadata : metadata information
5402 * @buffer : actual gralloc buffer to be returned to frameworks.
5403 * NULL if metadata.
5404 *
5405 * RETURN : NONE
5406 *==========================================================================*/
5407void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5408 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5409{
5410 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005411 pthread_mutex_lock(&mMutex);
5412 uint8_t batchSize = mBatchSize;
5413 pthread_mutex_unlock(&mMutex);
5414 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 handleBatchMetadata(metadata_buf,
5416 true /* free_and_bufdone_meta_buf */);
5417 } else { /* mBatchSize = 0 */
5418 hdrPlusPerfLock(metadata_buf);
5419 pthread_mutex_lock(&mMutex);
5420 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005421 true /* free_and_bufdone_meta_buf */,
5422 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 pthread_mutex_unlock(&mMutex);
5424 }
5425 } else if (isInputBuffer) {
5426 pthread_mutex_lock(&mMutex);
5427 handleInputBufferWithLock(frame_number);
5428 pthread_mutex_unlock(&mMutex);
5429 } else {
5430 pthread_mutex_lock(&mMutex);
5431 handleBufferWithLock(buffer, frame_number);
5432 pthread_mutex_unlock(&mMutex);
5433 }
5434 return;
5435}
5436
5437/*===========================================================================
5438 * FUNCTION : getReprocessibleOutputStreamId
5439 *
5440 * DESCRIPTION: Get source output stream id for the input reprocess stream
5441 * based on size and format, which would be the largest
5442 * output stream if an input stream exists.
5443 *
5444 * PARAMETERS :
5445 * @id : return the stream id if found
5446 *
5447 * RETURN : int32_t type of status
5448 * NO_ERROR -- success
5449 * none-zero failure code
5450 *==========================================================================*/
5451int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5452{
5453 /* check if any output or bidirectional stream with the same size and format
5454 and return that stream */
5455 if ((mInputStreamInfo.dim.width > 0) &&
5456 (mInputStreamInfo.dim.height > 0)) {
5457 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5458 it != mStreamInfo.end(); it++) {
5459
5460 camera3_stream_t *stream = (*it)->stream;
5461 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5462 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5463 (stream->format == mInputStreamInfo.format)) {
5464 // Usage flag for an input stream and the source output stream
5465 // may be different.
5466 LOGD("Found reprocessible output stream! %p", *it);
5467 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5468 stream->usage, mInputStreamInfo.usage);
5469
5470 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5471 if (channel != NULL && channel->mStreams[0]) {
5472 id = channel->mStreams[0]->getMyServerID();
5473 return NO_ERROR;
5474 }
5475 }
5476 }
5477 } else {
5478 LOGD("No input stream, so no reprocessible output stream");
5479 }
5480 return NAME_NOT_FOUND;
5481}
5482
5483/*===========================================================================
5484 * FUNCTION : lookupFwkName
5485 *
5486 * DESCRIPTION: In case the enum is not same in fwk and backend
5487 * make sure the parameter is correctly propogated
5488 *
5489 * PARAMETERS :
5490 * @arr : map between the two enums
5491 * @len : len of the map
5492 * @hal_name : name of the hal_parm to map
5493 *
5494 * RETURN : int type of status
5495 * fwk_name -- success
5496 * none-zero failure code
5497 *==========================================================================*/
5498template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5499 size_t len, halType hal_name)
5500{
5501
5502 for (size_t i = 0; i < len; i++) {
5503 if (arr[i].hal_name == hal_name) {
5504 return arr[i].fwk_name;
5505 }
5506 }
5507
5508 /* Not able to find matching framework type is not necessarily
5509 * an error case. This happens when mm-camera supports more attributes
5510 * than the frameworks do */
5511 LOGH("Cannot find matching framework type");
5512 return NAME_NOT_FOUND;
5513}
5514
5515/*===========================================================================
5516 * FUNCTION : lookupHalName
5517 *
5518 * DESCRIPTION: In case the enum is not same in fwk and backend
5519 * make sure the parameter is correctly propogated
5520 *
5521 * PARAMETERS :
5522 * @arr : map between the two enums
5523 * @len : len of the map
5524 * @fwk_name : name of the hal_parm to map
5525 *
5526 * RETURN : int32_t type of status
5527 * hal_name -- success
5528 * none-zero failure code
5529 *==========================================================================*/
5530template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5531 size_t len, fwkType fwk_name)
5532{
5533 for (size_t i = 0; i < len; i++) {
5534 if (arr[i].fwk_name == fwk_name) {
5535 return arr[i].hal_name;
5536 }
5537 }
5538
5539 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5540 return NAME_NOT_FOUND;
5541}
5542
5543/*===========================================================================
5544 * FUNCTION : lookupProp
5545 *
5546 * DESCRIPTION: lookup a value by its name
5547 *
5548 * PARAMETERS :
5549 * @arr : map between the two enums
5550 * @len : size of the map
5551 * @name : name to be looked up
5552 *
5553 * RETURN : Value if found
5554 * CAM_CDS_MODE_MAX if not found
5555 *==========================================================================*/
5556template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5557 size_t len, const char *name)
5558{
5559 if (name) {
5560 for (size_t i = 0; i < len; i++) {
5561 if (!strcmp(arr[i].desc, name)) {
5562 return arr[i].val;
5563 }
5564 }
5565 }
5566 return CAM_CDS_MODE_MAX;
5567}
5568
/*===========================================================================
 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate the metadata received from the HAL/backend into the
 *              framework camera_metadata_t format.
5572 *
5573 * PARAMETERS :
5574 * @metadata : metadata information from callback
5575 * @timestamp: metadata buffer timestamp
5576 * @request_id: request id
5577 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005578 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5579 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005580 * @pprocDone: whether internal offline postprocsesing is done
5581 *
5582 * RETURN : camera_metadata_t*
5583 * metadata in a format specified by fwk
5584 *==========================================================================*/
5585camera_metadata_t*
5586QCamera3HardwareInterface::translateFromHalMetadata(
5587 metadata_buffer_t *metadata,
5588 nsecs_t timestamp,
5589 int32_t request_id,
5590 const CameraMetadata& jpegMetadata,
5591 uint8_t pipeline_depth,
5592 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005593 /* DevCamDebug metadata translateFromHalMetadata argument */
5594 uint8_t DevCamDebug_meta_enable,
5595 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005596 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005597 uint8_t fwk_cacMode,
5598 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005599{
5600 CameraMetadata camMetadata;
5601 camera_metadata_t *resultMetadata;
5602
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005603 if (mBatchSize && !firstMetadataInBatch) {
5604 /* In batch mode, use cached metadata from the first metadata
5605 in the batch */
5606 camMetadata.clear();
5607 camMetadata = mCachedMetadata;
5608 }
5609
Thierry Strudel3d639192016-09-09 11:52:26 -07005610 if (jpegMetadata.entryCount())
5611 camMetadata.append(jpegMetadata);
5612
5613 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5614 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5615 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5616 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005617 if (mBatchSize == 0) {
5618 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5619 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5620 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005621
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005622 if (mBatchSize && !firstMetadataInBatch) {
5623 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5624 resultMetadata = camMetadata.release();
5625 return resultMetadata;
5626 }
5627
Samuel Ha68ba5172016-12-15 18:41:12 -08005628 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5629 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5630 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5631 // DevCamDebug metadata translateFromHalMetadata AF
5632 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5633 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5634 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5635 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5636 }
5637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5638 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5639 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5640 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5641 }
5642 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5643 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5644 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5645 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5646 }
5647 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5648 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5649 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5650 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5651 }
5652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5653 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5654 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5655 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5656 }
5657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5658 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5659 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5660 *DevCamDebug_af_monitor_pdaf_target_pos;
5661 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5662 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5663 }
5664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5665 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5666 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5667 *DevCamDebug_af_monitor_pdaf_confidence;
5668 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5669 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5670 }
5671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5672 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5673 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5674 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5675 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5676 }
5677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5678 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5679 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5680 *DevCamDebug_af_monitor_tof_target_pos;
5681 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5682 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5683 }
5684 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5685 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5686 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5687 *DevCamDebug_af_monitor_tof_confidence;
5688 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5689 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5690 }
5691 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5692 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5693 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5694 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5695 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5696 }
5697 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5698 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5699 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5700 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5701 &fwk_DevCamDebug_af_monitor_type_select, 1);
5702 }
5703 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5704 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5705 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5706 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5707 &fwk_DevCamDebug_af_monitor_refocus, 1);
5708 }
5709 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5710 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5711 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5712 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5713 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5714 }
5715 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5716 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5717 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5718 *DevCamDebug_af_search_pdaf_target_pos;
5719 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5720 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5721 }
5722 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5723 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5724 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5725 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5726 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5727 }
5728 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5729 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5730 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5731 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5732 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5733 }
5734 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5735 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5736 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5737 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5738 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5739 }
5740 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5741 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5742 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5743 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5744 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5745 }
5746 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5747 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5748 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5749 *DevCamDebug_af_search_tof_target_pos;
5750 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5751 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5752 }
5753 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5754 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5755 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5756 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5757 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5758 }
5759 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5760 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5761 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5762 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5763 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5764 }
5765 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5766 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5767 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5768 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5769 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5770 }
5771 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5772 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5773 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5774 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5775 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5776 }
5777 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5778 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5779 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5780 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5781 &fwk_DevCamDebug_af_search_type_select, 1);
5782 }
5783 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5784 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5785 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5786 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5787 &fwk_DevCamDebug_af_search_next_pos, 1);
5788 }
5789 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5790 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5791 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5792 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5793 &fwk_DevCamDebug_af_search_target_pos, 1);
5794 }
5795 // DevCamDebug metadata translateFromHalMetadata AEC
5796 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5797 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5798 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5799 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5800 }
5801 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5802 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5803 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5804 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5805 }
5806 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5807 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5808 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5809 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5810 }
5811 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5812 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5813 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5814 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5815 }
5816 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5817 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5818 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5819 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5820 }
5821 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5822 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5823 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5824 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5825 }
5826 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5827 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5828 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5829 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5830 }
5831 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5832 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5833 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5834 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5835 }
5836 // DevCamDebug metadata translateFromHalMetadata AWB
5837 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5838 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5839 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5840 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5841 }
5842 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5843 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5844 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5845 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5846 }
5847 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5848 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5849 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5850 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5851 }
5852 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5853 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5854 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5855 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5856 }
5857 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5858 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5859 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5860 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5861 }
5862 }
5863 // atrace_end(ATRACE_TAG_ALWAYS);
5864
Thierry Strudel3d639192016-09-09 11:52:26 -07005865 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5866 int64_t fwk_frame_number = *frame_number;
5867 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5868 }
5869
5870 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5871 int32_t fps_range[2];
5872 fps_range[0] = (int32_t)float_range->min_fps;
5873 fps_range[1] = (int32_t)float_range->max_fps;
5874 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5875 fps_range, 2);
5876 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5877 fps_range[0], fps_range[1]);
5878 }
5879
5880 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5881 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5882 }
5883
5884 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5885 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5886 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5887 *sceneMode);
5888 if (NAME_NOT_FOUND != val) {
5889 uint8_t fwkSceneMode = (uint8_t)val;
5890 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5891 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5892 fwkSceneMode);
5893 }
5894 }
5895
5896 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5897 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5898 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5899 }
5900
5901 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5902 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5903 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5904 }
5905
5906 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5907 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5908 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5909 }
5910
5911 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5912 CAM_INTF_META_EDGE_MODE, metadata) {
5913 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5914 }
5915
5916 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5917 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5918 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5919 }
5920
5921 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5922 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5923 }
5924
5925 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5926 if (0 <= *flashState) {
5927 uint8_t fwk_flashState = (uint8_t) *flashState;
5928 if (!gCamCapability[mCameraId]->flash_available) {
5929 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5930 }
5931 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5932 }
5933 }
5934
5935 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5936 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5937 if (NAME_NOT_FOUND != val) {
5938 uint8_t fwk_flashMode = (uint8_t)val;
5939 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5940 }
5941 }
5942
5943 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5944 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5945 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5946 }
5947
5948 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5949 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5950 }
5951
5952 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5953 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5954 }
5955
5956 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5957 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5958 }
5959
5960 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5961 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5962 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5963 }
5964
5965 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5966 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5967 LOGD("fwk_videoStab = %d", fwk_videoStab);
5968 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5969 } else {
5970 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5971 // and so hardcoding the Video Stab result to OFF mode.
5972 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5973 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005974 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 }
5976
5977 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5978 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5979 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5980 }
5981
5982 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5983 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5984 }
5985
Thierry Strudel3d639192016-09-09 11:52:26 -07005986 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5987 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005988 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07005989
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005990 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
5991 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07005992
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005993 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07005994 blackLevelAppliedPattern->cam_black_level[0],
5995 blackLevelAppliedPattern->cam_black_level[1],
5996 blackLevelAppliedPattern->cam_black_level[2],
5997 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005998 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
5999 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006000
6001#ifndef USE_HAL_3_3
6002 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006003 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6004 // depth space.
6005 fwk_blackLevelInd[0] /= 4.0;
6006 fwk_blackLevelInd[1] /= 4.0;
6007 fwk_blackLevelInd[2] /= 4.0;
6008 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006009 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6010 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006011#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006012 }
6013
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006014#ifndef USE_HAL_3_3
6015 // Fixed whitelevel is used by ISP/Sensor
6016 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6017 &gCamCapability[mCameraId]->white_level, 1);
6018#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006019
6020 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6021 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6022 int32_t scalerCropRegion[4];
6023 scalerCropRegion[0] = hScalerCropRegion->left;
6024 scalerCropRegion[1] = hScalerCropRegion->top;
6025 scalerCropRegion[2] = hScalerCropRegion->width;
6026 scalerCropRegion[3] = hScalerCropRegion->height;
6027
6028 // Adjust crop region from sensor output coordinate system to active
6029 // array coordinate system.
6030 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6031 scalerCropRegion[2], scalerCropRegion[3]);
6032
6033 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6034 }
6035
6036 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6037 LOGD("sensorExpTime = %lld", *sensorExpTime);
6038 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6039 }
6040
6041 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6042 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6043 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6044 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6045 }
6046
6047 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6048 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6049 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6050 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6051 sensorRollingShutterSkew, 1);
6052 }
6053
6054 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6055 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6056 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6057
6058 //calculate the noise profile based on sensitivity
6059 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6060 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6061 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6062 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6063 noise_profile[i] = noise_profile_S;
6064 noise_profile[i+1] = noise_profile_O;
6065 }
6066 LOGD("noise model entry (S, O) is (%f, %f)",
6067 noise_profile_S, noise_profile_O);
6068 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6069 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6070 }
6071
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006072#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006073 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006074 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006075 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006076 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006077 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6078 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6079 }
6080 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006081#endif
6082
Thierry Strudel3d639192016-09-09 11:52:26 -07006083 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6084 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6085 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6086 }
6087
6088 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6089 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6090 *faceDetectMode);
6091 if (NAME_NOT_FOUND != val) {
6092 uint8_t fwk_faceDetectMode = (uint8_t)val;
6093 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6094
6095 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6096 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6097 CAM_INTF_META_FACE_DETECTION, metadata) {
6098 uint8_t numFaces = MIN(
6099 faceDetectionInfo->num_faces_detected, MAX_ROI);
6100 int32_t faceIds[MAX_ROI];
6101 uint8_t faceScores[MAX_ROI];
6102 int32_t faceRectangles[MAX_ROI * 4];
6103 int32_t faceLandmarks[MAX_ROI * 6];
6104 size_t j = 0, k = 0;
6105
6106 for (size_t i = 0; i < numFaces; i++) {
6107 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6108 // Adjust crop region from sensor output coordinate system to active
6109 // array coordinate system.
6110 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6111 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6112 rect.width, rect.height);
6113
6114 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6115 faceRectangles+j, -1);
6116
6117 j+= 4;
6118 }
6119 if (numFaces <= 0) {
6120 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6121 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6122 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6123 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6124 }
6125
6126 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6127 numFaces);
6128 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6129 faceRectangles, numFaces * 4U);
6130 if (fwk_faceDetectMode ==
6131 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6132 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6133 CAM_INTF_META_FACE_LANDMARK, metadata) {
6134
6135 for (size_t i = 0; i < numFaces; i++) {
6136 // Map the co-ordinate sensor output coordinate system to active
6137 // array coordinate system.
6138 mCropRegionMapper.toActiveArray(
6139 landmarks->face_landmarks[i].left_eye_center.x,
6140 landmarks->face_landmarks[i].left_eye_center.y);
6141 mCropRegionMapper.toActiveArray(
6142 landmarks->face_landmarks[i].right_eye_center.x,
6143 landmarks->face_landmarks[i].right_eye_center.y);
6144 mCropRegionMapper.toActiveArray(
6145 landmarks->face_landmarks[i].mouth_center.x,
6146 landmarks->face_landmarks[i].mouth_center.y);
6147
6148 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006149 k+= TOTAL_LANDMARK_INDICES;
6150 }
6151 } else {
6152 for (size_t i = 0; i < numFaces; i++) {
6153 setInvalidLandmarks(faceLandmarks+k);
6154 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 }
6156 }
6157
6158 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6159 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6160 faceLandmarks, numFaces * 6U);
6161 }
6162 }
6163 }
6164 }
6165 }
6166
6167 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6168 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6169 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006170
6171 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6172 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6173 // process histogram statistics info
6174 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6175 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6176 cam_histogram_data_t rHistData, gHistData, bHistData;
6177 memset(&rHistData, 0, sizeof(rHistData));
6178 memset(&gHistData, 0, sizeof(gHistData));
6179 memset(&bHistData, 0, sizeof(bHistData));
6180
6181 switch (stats_data->type) {
6182 case CAM_HISTOGRAM_TYPE_BAYER:
6183 switch (stats_data->bayer_stats.data_type) {
6184 case CAM_STATS_CHANNEL_GR:
6185 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6186 break;
6187 case CAM_STATS_CHANNEL_GB:
6188 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6189 break;
6190 case CAM_STATS_CHANNEL_B:
6191 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6192 break;
6193 case CAM_STATS_CHANNEL_ALL:
6194 rHistData = stats_data->bayer_stats.r_stats;
6195 //Framework expects only 3 channels. So, for now,
6196 //use gb stats for G channel.
6197 gHistData = stats_data->bayer_stats.gb_stats;
6198 bHistData = stats_data->bayer_stats.b_stats;
6199 break;
6200 case CAM_STATS_CHANNEL_Y:
6201 case CAM_STATS_CHANNEL_R:
6202 default:
6203 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6204 break;
6205 }
6206 break;
6207 case CAM_HISTOGRAM_TYPE_YUV:
6208 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6209 break;
6210 }
6211
6212 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6213 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6214 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6215
6216 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6217 }
6218 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006219 }
6220
6221 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6222 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6223 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6224 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6225 }
6226
6227 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6228 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6229 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6230 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6231 }
6232
6233 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6234 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6235 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6236 CAM_MAX_SHADING_MAP_HEIGHT);
6237 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6238 CAM_MAX_SHADING_MAP_WIDTH);
6239 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6240 lensShadingMap->lens_shading, 4U * map_width * map_height);
6241 }
6242
6243 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6244 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6245 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6246 }
6247
6248 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6249 //Populate CAM_INTF_META_TONEMAP_CURVES
6250 /* ch0 = G, ch 1 = B, ch 2 = R*/
6251 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6252 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6253 tonemap->tonemap_points_cnt,
6254 CAM_MAX_TONEMAP_CURVE_SIZE);
6255 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6256 }
6257
6258 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6259 &tonemap->curves[0].tonemap_points[0][0],
6260 tonemap->tonemap_points_cnt * 2);
6261
6262 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6263 &tonemap->curves[1].tonemap_points[0][0],
6264 tonemap->tonemap_points_cnt * 2);
6265
6266 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6267 &tonemap->curves[2].tonemap_points[0][0],
6268 tonemap->tonemap_points_cnt * 2);
6269 }
6270
6271 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6272 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6273 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6274 CC_GAIN_MAX);
6275 }
6276
6277 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6278 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6279 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6280 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6281 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6282 }
6283
6284 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6285 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6286 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6287 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6288 toneCurve->tonemap_points_cnt,
6289 CAM_MAX_TONEMAP_CURVE_SIZE);
6290 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6291 }
6292 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6293 (float*)toneCurve->curve.tonemap_points,
6294 toneCurve->tonemap_points_cnt * 2);
6295 }
6296
6297 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6298 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6299 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6300 predColorCorrectionGains->gains, 4);
6301 }
6302
6303 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6304 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6305 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6306 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6307 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6308 }
6309
6310 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6311 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6312 }
6313
6314 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6315 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6316 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6317 }
6318
6319 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6320 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6321 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6322 }
6323
6324 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6325 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6326 *effectMode);
6327 if (NAME_NOT_FOUND != val) {
6328 uint8_t fwk_effectMode = (uint8_t)val;
6329 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6330 }
6331 }
6332
6333 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6334 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6335 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6336 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6337 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6338 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6339 }
6340 int32_t fwk_testPatternData[4];
6341 fwk_testPatternData[0] = testPatternData->r;
6342 fwk_testPatternData[3] = testPatternData->b;
6343 switch (gCamCapability[mCameraId]->color_arrangement) {
6344 case CAM_FILTER_ARRANGEMENT_RGGB:
6345 case CAM_FILTER_ARRANGEMENT_GRBG:
6346 fwk_testPatternData[1] = testPatternData->gr;
6347 fwk_testPatternData[2] = testPatternData->gb;
6348 break;
6349 case CAM_FILTER_ARRANGEMENT_GBRG:
6350 case CAM_FILTER_ARRANGEMENT_BGGR:
6351 fwk_testPatternData[2] = testPatternData->gr;
6352 fwk_testPatternData[1] = testPatternData->gb;
6353 break;
6354 default:
6355 LOGE("color arrangement %d is not supported",
6356 gCamCapability[mCameraId]->color_arrangement);
6357 break;
6358 }
6359 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6360 }
6361
6362 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6363 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6364 }
6365
6366 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6367 String8 str((const char *)gps_methods);
6368 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6369 }
6370
6371 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6372 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6373 }
6374
6375 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6376 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6377 }
6378
6379 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6380 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6381 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6382 }
6383
6384 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6385 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6386 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6387 }
6388
6389 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6390 int32_t fwk_thumb_size[2];
6391 fwk_thumb_size[0] = thumb_size->width;
6392 fwk_thumb_size[1] = thumb_size->height;
6393 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6394 }
6395
6396 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6397 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6398 privateData,
6399 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6400 }
6401
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006402 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6403 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6404 meteringMode, 1);
6405 }
6406
Thierry Strudel3d639192016-09-09 11:52:26 -07006407 if (metadata->is_tuning_params_valid) {
6408 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6409 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6410 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6411
6412
6413 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6414 sizeof(uint32_t));
6415 data += sizeof(uint32_t);
6416
6417 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6418 sizeof(uint32_t));
6419 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6420 data += sizeof(uint32_t);
6421
6422 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6423 sizeof(uint32_t));
6424 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6425 data += sizeof(uint32_t);
6426
6427 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6428 sizeof(uint32_t));
6429 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6430 data += sizeof(uint32_t);
6431
6432 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6433 sizeof(uint32_t));
6434 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6435 data += sizeof(uint32_t);
6436
6437 metadata->tuning_params.tuning_mod3_data_size = 0;
6438 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6439 sizeof(uint32_t));
6440 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6441 data += sizeof(uint32_t);
6442
6443 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6444 TUNING_SENSOR_DATA_MAX);
6445 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6446 count);
6447 data += count;
6448
6449 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6450 TUNING_VFE_DATA_MAX);
6451 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6452 count);
6453 data += count;
6454
6455 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6456 TUNING_CPP_DATA_MAX);
6457 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6458 count);
6459 data += count;
6460
6461 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6462 TUNING_CAC_DATA_MAX);
6463 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6464 count);
6465 data += count;
6466
6467 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6468 (int32_t *)(void *)tuning_meta_data_blob,
6469 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6470 }
6471
6472 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6473 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6474 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6475 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6476 NEUTRAL_COL_POINTS);
6477 }
6478
6479 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6480 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6481 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6482 }
6483
6484 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6485 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6486 // Adjust crop region from sensor output coordinate system to active
6487 // array coordinate system.
6488 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6489 hAeRegions->rect.width, hAeRegions->rect.height);
6490
6491 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6492 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6493 REGIONS_TUPLE_COUNT);
6494 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6495 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6496 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6497 hAeRegions->rect.height);
6498 }
6499
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006500 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6501 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6502 if (NAME_NOT_FOUND != val) {
6503 uint8_t fwkAfMode = (uint8_t)val;
6504 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6505 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6506 } else {
6507 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6508 val);
6509 }
6510 }
6511
Thierry Strudel3d639192016-09-09 11:52:26 -07006512 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6513 uint8_t fwk_afState = (uint8_t) *afState;
6514 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006515 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006516 }
6517
6518 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6519 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6520 }
6521
6522 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6523 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6524 }
6525
6526 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6527 uint8_t fwk_lensState = *lensState;
6528 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6529 }
6530
6531 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6532 /*af regions*/
6533 int32_t afRegions[REGIONS_TUPLE_COUNT];
6534 // Adjust crop region from sensor output coordinate system to active
6535 // array coordinate system.
6536 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6537 hAfRegions->rect.width, hAfRegions->rect.height);
6538
6539 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6540 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6541 REGIONS_TUPLE_COUNT);
6542 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6543 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6544 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6545 hAfRegions->rect.height);
6546 }
6547
6548 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006549 uint32_t ab_mode = *hal_ab_mode;
6550 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6551 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6552 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6553 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006554 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006555 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006556 if (NAME_NOT_FOUND != val) {
6557 uint8_t fwk_ab_mode = (uint8_t)val;
6558 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6559 }
6560 }
6561
6562 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6563 int val = lookupFwkName(SCENE_MODES_MAP,
6564 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6565 if (NAME_NOT_FOUND != val) {
6566 uint8_t fwkBestshotMode = (uint8_t)val;
6567 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6568 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6569 } else {
6570 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6571 }
6572 }
6573
6574 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6575 uint8_t fwk_mode = (uint8_t) *mode;
6576 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6577 }
6578
6579 /* Constant metadata values to be update*/
6580 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6581 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6582
6583 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6584 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6585
6586 int32_t hotPixelMap[2];
6587 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6588
6589 // CDS
6590 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6591 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6592 }
6593
Thierry Strudel04e026f2016-10-10 11:27:36 -07006594 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6595 int32_t fwk_hdr;
6596 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6597 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6598 } else {
6599 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6600 }
6601 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6602 }
6603
6604 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006605 int32_t fwk_ir = (int32_t) *ir;
6606 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006607 }
6608
Thierry Strudel269c81a2016-10-12 12:13:59 -07006609 // AEC SPEED
6610 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6611 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6612 }
6613
6614 // AWB SPEED
6615 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6616 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6617 }
6618
Thierry Strudel3d639192016-09-09 11:52:26 -07006619 // TNR
6620 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6621 uint8_t tnr_enable = tnr->denoise_enable;
6622 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6623
6624 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6625 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6626 }
6627
6628 // Reprocess crop data
6629 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6630 uint8_t cnt = crop_data->num_of_streams;
6631 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6632 // mm-qcamera-daemon only posts crop_data for streams
6633 // not linked to pproc. So no valid crop metadata is not
6634 // necessarily an error case.
6635 LOGD("No valid crop metadata entries");
6636 } else {
6637 uint32_t reproc_stream_id;
6638 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6639 LOGD("No reprocessible stream found, ignore crop data");
6640 } else {
6641 int rc = NO_ERROR;
6642 Vector<int32_t> roi_map;
6643 int32_t *crop = new int32_t[cnt*4];
6644 if (NULL == crop) {
6645 rc = NO_MEMORY;
6646 }
6647 if (NO_ERROR == rc) {
6648 int32_t streams_found = 0;
6649 for (size_t i = 0; i < cnt; i++) {
6650 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6651 if (pprocDone) {
6652 // HAL already does internal reprocessing,
6653 // either via reprocessing before JPEG encoding,
6654 // or offline postprocessing for pproc bypass case.
6655 crop[0] = 0;
6656 crop[1] = 0;
6657 crop[2] = mInputStreamInfo.dim.width;
6658 crop[3] = mInputStreamInfo.dim.height;
6659 } else {
6660 crop[0] = crop_data->crop_info[i].crop.left;
6661 crop[1] = crop_data->crop_info[i].crop.top;
6662 crop[2] = crop_data->crop_info[i].crop.width;
6663 crop[3] = crop_data->crop_info[i].crop.height;
6664 }
6665 roi_map.add(crop_data->crop_info[i].roi_map.left);
6666 roi_map.add(crop_data->crop_info[i].roi_map.top);
6667 roi_map.add(crop_data->crop_info[i].roi_map.width);
6668 roi_map.add(crop_data->crop_info[i].roi_map.height);
6669 streams_found++;
6670 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6671 crop[0], crop[1], crop[2], crop[3]);
6672 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6673 crop_data->crop_info[i].roi_map.left,
6674 crop_data->crop_info[i].roi_map.top,
6675 crop_data->crop_info[i].roi_map.width,
6676 crop_data->crop_info[i].roi_map.height);
6677 break;
6678
6679 }
6680 }
6681 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6682 &streams_found, 1);
6683 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6684 crop, (size_t)(streams_found * 4));
6685 if (roi_map.array()) {
6686 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6687 roi_map.array(), roi_map.size());
6688 }
6689 }
6690 if (crop) {
6691 delete [] crop;
6692 }
6693 }
6694 }
6695 }
6696
6697 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6698 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6699 // so hardcoding the CAC result to OFF mode.
6700 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6701 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6702 } else {
6703 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6704 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6705 *cacMode);
6706 if (NAME_NOT_FOUND != val) {
6707 uint8_t resultCacMode = (uint8_t)val;
6708 // check whether CAC result from CB is equal to Framework set CAC mode
6709 // If not equal then set the CAC mode came in corresponding request
6710 if (fwk_cacMode != resultCacMode) {
6711 resultCacMode = fwk_cacMode;
6712 }
6713 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6714 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6715 } else {
6716 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6717 }
6718 }
6719 }
6720
6721 // Post blob of cam_cds_data through vendor tag.
6722 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6723 uint8_t cnt = cdsInfo->num_of_streams;
6724 cam_cds_data_t cdsDataOverride;
6725 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6726 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6727 cdsDataOverride.num_of_streams = 1;
6728 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6729 uint32_t reproc_stream_id;
6730 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6731 LOGD("No reprocessible stream found, ignore cds data");
6732 } else {
6733 for (size_t i = 0; i < cnt; i++) {
6734 if (cdsInfo->cds_info[i].stream_id ==
6735 reproc_stream_id) {
6736 cdsDataOverride.cds_info[0].cds_enable =
6737 cdsInfo->cds_info[i].cds_enable;
6738 break;
6739 }
6740 }
6741 }
6742 } else {
6743 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6744 }
6745 camMetadata.update(QCAMERA3_CDS_INFO,
6746 (uint8_t *)&cdsDataOverride,
6747 sizeof(cam_cds_data_t));
6748 }
6749
6750 // Ldaf calibration data
6751 if (!mLdafCalibExist) {
6752 IF_META_AVAILABLE(uint32_t, ldafCalib,
6753 CAM_INTF_META_LDAF_EXIF, metadata) {
6754 mLdafCalibExist = true;
6755 mLdafCalib[0] = ldafCalib[0];
6756 mLdafCalib[1] = ldafCalib[1];
6757 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6758 ldafCalib[0], ldafCalib[1]);
6759 }
6760 }
6761
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006762 // Reprocess and DDM debug data through vendor tag
6763 cam_reprocess_info_t repro_info;
6764 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006765 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
6766 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006767 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006768 }
6769 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
6770 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006771 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006772 }
6773 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
6774 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006775 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006776 }
6777 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
6778 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006779 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006780 }
6781 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
6782 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006783 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006784 }
6785 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006786 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006787 }
6788 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
6789 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006790 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006791 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006792 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
6793 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
6794 }
6795 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
6796 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
6797 }
6798 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
6799 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006800
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006801 // INSTANT AEC MODE
6802 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
6803 CAM_INTF_PARM_INSTANT_AEC, metadata) {
6804 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
6805 }
6806
Shuzhen Wange763e802016-03-31 10:24:29 -07006807 // AF scene change
6808 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6809 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6810 }
6811
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006812 /* In batch mode, cache the first metadata in the batch */
6813 if (mBatchSize && firstMetadataInBatch) {
6814 mCachedMetadata.clear();
6815 mCachedMetadata = camMetadata;
6816 }
6817
Thierry Strudel3d639192016-09-09 11:52:26 -07006818 resultMetadata = camMetadata.release();
6819 return resultMetadata;
6820}
6821
6822/*===========================================================================
6823 * FUNCTION : saveExifParams
6824 *
6825 * DESCRIPTION:
6826 *
6827 * PARAMETERS :
6828 * @metadata : metadata information from callback
6829 *
6830 * RETURN : none
6831 *
6832 *==========================================================================*/
6833void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
6834{
6835 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
6836 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
6837 if (mExifParams.debug_params) {
6838 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
6839 mExifParams.debug_params->ae_debug_params_valid = TRUE;
6840 }
6841 }
6842 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
6843 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
6844 if (mExifParams.debug_params) {
6845 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
6846 mExifParams.debug_params->awb_debug_params_valid = TRUE;
6847 }
6848 }
6849 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
6850 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
6851 if (mExifParams.debug_params) {
6852 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
6853 mExifParams.debug_params->af_debug_params_valid = TRUE;
6854 }
6855 }
6856 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
6857 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
6858 if (mExifParams.debug_params) {
6859 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
6860 mExifParams.debug_params->asd_debug_params_valid = TRUE;
6861 }
6862 }
6863 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
6864 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
6865 if (mExifParams.debug_params) {
6866 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
6867 mExifParams.debug_params->stats_debug_params_valid = TRUE;
6868 }
6869 }
6870 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
6871 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
6872 if (mExifParams.debug_params) {
6873 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
6874 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
6875 }
6876 }
6877 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
6878 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
6879 if (mExifParams.debug_params) {
6880 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
6881 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
6882 }
6883 }
6884 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
6885 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
6886 if (mExifParams.debug_params) {
6887 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
6888 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
6889 }
6890 }
6891}
6892
6893/*===========================================================================
6894 * FUNCTION : get3AExifParams
6895 *
6896 * DESCRIPTION:
6897 *
6898 * PARAMETERS : none
6899 *
6900 *
6901 * RETURN : mm_jpeg_exif_params_t
6902 *
6903 *==========================================================================*/
6904mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
6905{
6906 return mExifParams;
6907}
6908
6909/*===========================================================================
6910 * FUNCTION : translateCbUrgentMetadataToResultMetadata
6911 *
6912 * DESCRIPTION:
6913 *
6914 * PARAMETERS :
6915 * @metadata : metadata information from callback
6916 *
6917 * RETURN : camera_metadata_t*
6918 * metadata in a format specified by fwk
6919 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    // Translate the "urgent" (partial, low-latency) subset of HAL metadata
    // into a framework camera_metadata_t: 3A states, triggers, and the
    // deduced AE mode. Ownership of the returned buffer passes to the caller
    // via CameraMetadata::release().
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: HAL value narrowed to the framework's uint8_t enum.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // AE precapture trigger and its id are reported back together.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state (searching/converged/locked/...), narrowed to uint8_t.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // AF trigger and trigger id, echoed back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: HAL enum mapped to the framework enum; unknown values are
    // logged and dropped rather than reported.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three separate HAL fields.
    // Gather red-eye, LED/flash mode and AE mode first, then resolve with
    // the priority: red-eye reduction > explicit flash (auto/on) > plain
    // AE on/off. The order of the if/else-if chain below encodes that
    // priority and must not be rearranged.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        // Red-eye reduction active implies the auto-flash-redeye AE mode.
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        // Flash engaged: map the flash mode to its AE-mode counterpart.
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        // None of the three inputs was conclusive; AE mode is not reported.
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // Instant AEC: while active, count frames and stop (scheduling a reset
    // via mResetInstantAEC) as soon as AEC reports settled or the
    // frame-count bound is reached, whichever comes first.
    if (mInstantAEC) {
        // Increment frame Idx count untill a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    // Transfer ownership of the assembled metadata buffer to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7030
7031/*===========================================================================
7032 * FUNCTION : dumpMetadataToFile
7033 *
7034 * DESCRIPTION: Dumps tuning metadata to file system
7035 *
7036 * PARAMETERS :
7037 * @meta : tuning metadata
7038 * @dumpFrameCount : current dump frame count
7039 * @enabled : Enable mask
7040 *
7041 *==========================================================================*/
7042void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7043 uint32_t &dumpFrameCount,
7044 bool enabled,
7045 const char *type,
7046 uint32_t frameNumber)
7047{
7048 //Some sanity checks
7049 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7050 LOGE("Tuning sensor data size bigger than expected %d: %d",
7051 meta.tuning_sensor_data_size,
7052 TUNING_SENSOR_DATA_MAX);
7053 return;
7054 }
7055
7056 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7057 LOGE("Tuning VFE data size bigger than expected %d: %d",
7058 meta.tuning_vfe_data_size,
7059 TUNING_VFE_DATA_MAX);
7060 return;
7061 }
7062
7063 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7064 LOGE("Tuning CPP data size bigger than expected %d: %d",
7065 meta.tuning_cpp_data_size,
7066 TUNING_CPP_DATA_MAX);
7067 return;
7068 }
7069
7070 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7071 LOGE("Tuning CAC data size bigger than expected %d: %d",
7072 meta.tuning_cac_data_size,
7073 TUNING_CAC_DATA_MAX);
7074 return;
7075 }
7076 //
7077
7078 if(enabled){
7079 char timeBuf[FILENAME_MAX];
7080 char buf[FILENAME_MAX];
7081 memset(buf, 0, sizeof(buf));
7082 memset(timeBuf, 0, sizeof(timeBuf));
7083 time_t current_time;
7084 struct tm * timeinfo;
7085 time (&current_time);
7086 timeinfo = localtime (&current_time);
7087 if (timeinfo != NULL) {
7088 strftime (timeBuf, sizeof(timeBuf),
7089 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7090 }
7091 String8 filePath(timeBuf);
7092 snprintf(buf,
7093 sizeof(buf),
7094 "%dm_%s_%d.bin",
7095 dumpFrameCount,
7096 type,
7097 frameNumber);
7098 filePath.append(buf);
7099 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7100 if (file_fd >= 0) {
7101 ssize_t written_len = 0;
7102 meta.tuning_data_version = TUNING_DATA_VERSION;
7103 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7104 written_len += write(file_fd, data, sizeof(uint32_t));
7105 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7106 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7107 written_len += write(file_fd, data, sizeof(uint32_t));
7108 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7109 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7110 written_len += write(file_fd, data, sizeof(uint32_t));
7111 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7112 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7113 written_len += write(file_fd, data, sizeof(uint32_t));
7114 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7115 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7116 written_len += write(file_fd, data, sizeof(uint32_t));
7117 meta.tuning_mod3_data_size = 0;
7118 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7119 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7120 written_len += write(file_fd, data, sizeof(uint32_t));
7121 size_t total_size = meta.tuning_sensor_data_size;
7122 data = (void *)((uint8_t *)&meta.data);
7123 written_len += write(file_fd, data, total_size);
7124 total_size = meta.tuning_vfe_data_size;
7125 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7126 written_len += write(file_fd, data, total_size);
7127 total_size = meta.tuning_cpp_data_size;
7128 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7129 written_len += write(file_fd, data, total_size);
7130 total_size = meta.tuning_cac_data_size;
7131 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7132 written_len += write(file_fd, data, total_size);
7133 close(file_fd);
7134 }else {
7135 LOGE("fail to open file for metadata dumping");
7136 }
7137 }
7138}
7139
7140/*===========================================================================
7141 * FUNCTION : cleanAndSortStreamInfo
7142 *
7143 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7144 * and sort them such that raw stream is at the end of the list
7145 * This is a workaround for camera daemon constraint.
7146 *
7147 * PARAMETERS : None
7148 *
7149 *==========================================================================*/
7150void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7151{
7152 List<stream_info_t *> newStreamInfo;
7153
7154 /*clean up invalid streams*/
7155 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7156 it != mStreamInfo.end();) {
7157 if(((*it)->status) == INVALID){
7158 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7159 delete channel;
7160 free(*it);
7161 it = mStreamInfo.erase(it);
7162 } else {
7163 it++;
7164 }
7165 }
7166
7167 // Move preview/video/callback/snapshot streams into newList
7168 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7169 it != mStreamInfo.end();) {
7170 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7171 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7172 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7173 newStreamInfo.push_back(*it);
7174 it = mStreamInfo.erase(it);
7175 } else
7176 it++;
7177 }
7178 // Move raw streams into newList
7179 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7180 it != mStreamInfo.end();) {
7181 newStreamInfo.push_back(*it);
7182 it = mStreamInfo.erase(it);
7183 }
7184
7185 mStreamInfo = newStreamInfo;
7186}
7187
7188/*===========================================================================
7189 * FUNCTION : extractJpegMetadata
7190 *
7191 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7192 * JPEG metadata is cached in HAL, and return as part of capture
7193 * result when metadata is returned from camera daemon.
7194 *
7195 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7196 * @request: capture request
7197 *
7198 *==========================================================================*/
7199void QCamera3HardwareInterface::extractJpegMetadata(
7200 CameraMetadata& jpegMetadata,
7201 const camera3_capture_request_t *request)
7202{
7203 CameraMetadata frame_settings;
7204 frame_settings = request->settings;
7205
7206 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7207 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7208 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7209 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7210
7211 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7212 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7213 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7214 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7215
7216 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7217 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7218 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7219 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7220
7221 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7222 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7223 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7224 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7225
7226 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7227 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7228 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7229 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7230
7231 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7232 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7233 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7234 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7235
7236 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7237 int32_t thumbnail_size[2];
7238 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7239 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7240 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7241 int32_t orientation =
7242 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007243 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007244 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7245 int32_t temp;
7246 temp = thumbnail_size[0];
7247 thumbnail_size[0] = thumbnail_size[1];
7248 thumbnail_size[1] = temp;
7249 }
7250 }
7251 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7252 thumbnail_size,
7253 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7254 }
7255
7256}
7257
7258/*===========================================================================
7259 * FUNCTION : convertToRegions
7260 *
7261 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7262 *
7263 * PARAMETERS :
7264 * @rect : cam_rect_t struct to convert
7265 * @region : int32_t destination array
7266 * @weight : if we are converting from cam_area_t, weight is valid
7267 * else weight = -1
7268 *
7269 *==========================================================================*/
7270void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7271 int32_t *region, int weight)
7272{
7273 region[0] = rect.left;
7274 region[1] = rect.top;
7275 region[2] = rect.left + rect.width;
7276 region[3] = rect.top + rect.height;
7277 if (weight > -1) {
7278 region[4] = weight;
7279 }
7280}
7281
7282/*===========================================================================
7283 * FUNCTION : convertFromRegions
7284 *
7285 * DESCRIPTION: helper method to convert from array to cam_rect_t
7286 *
7287 * PARAMETERS :
7288 * @rect : cam_rect_t struct to convert
7289 * @region : int32_t destination array
7290 * @weight : if we are converting from cam_area_t, weight is valid
7291 * else weight = -1
7292 *
7293 *==========================================================================*/
7294void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7295 const camera_metadata_t *settings, uint32_t tag)
7296{
7297 CameraMetadata frame_settings;
7298 frame_settings = settings;
7299 int32_t x_min = frame_settings.find(tag).data.i32[0];
7300 int32_t y_min = frame_settings.find(tag).data.i32[1];
7301 int32_t x_max = frame_settings.find(tag).data.i32[2];
7302 int32_t y_max = frame_settings.find(tag).data.i32[3];
7303 roi.weight = frame_settings.find(tag).data.i32[4];
7304 roi.rect.left = x_min;
7305 roi.rect.top = y_min;
7306 roi.rect.width = x_max - x_min;
7307 roi.rect.height = y_max - y_min;
7308}
7309
7310/*===========================================================================
7311 * FUNCTION : resetIfNeededROI
7312 *
7313 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7314 * crop region
7315 *
7316 * PARAMETERS :
7317 * @roi : cam_area_t struct to resize
7318 * @scalerCropRegion : cam_crop_region_t region to compare against
7319 *
7320 *
7321 *==========================================================================*/
7322bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7323 const cam_crop_region_t* scalerCropRegion)
7324{
7325 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7326 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7327 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7328 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7329
7330 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7331 * without having this check the calculations below to validate if the roi
7332 * is inside scalar crop region will fail resulting in the roi not being
7333 * reset causing algorithm to continue to use stale roi window
7334 */
7335 if (roi->weight == 0) {
7336 return true;
7337 }
7338
7339 if ((roi_x_max < scalerCropRegion->left) ||
7340 // right edge of roi window is left of scalar crop's left edge
7341 (roi_y_max < scalerCropRegion->top) ||
7342 // bottom edge of roi window is above scalar crop's top edge
7343 (roi->rect.left > crop_x_max) ||
7344 // left edge of roi window is beyond(right) of scalar crop's right edge
7345 (roi->rect.top > crop_y_max)){
7346 // top edge of roi windo is above scalar crop's top edge
7347 return false;
7348 }
7349 if (roi->rect.left < scalerCropRegion->left) {
7350 roi->rect.left = scalerCropRegion->left;
7351 }
7352 if (roi->rect.top < scalerCropRegion->top) {
7353 roi->rect.top = scalerCropRegion->top;
7354 }
7355 if (roi_x_max > crop_x_max) {
7356 roi_x_max = crop_x_max;
7357 }
7358 if (roi_y_max > crop_y_max) {
7359 roi_y_max = crop_y_max;
7360 }
7361 roi->rect.width = roi_x_max - roi->rect.left;
7362 roi->rect.height = roi_y_max - roi->rect.top;
7363 return true;
7364}
7365
7366/*===========================================================================
7367 * FUNCTION : convertLandmarks
7368 *
7369 * DESCRIPTION: helper method to extract the landmarks from face detection info
7370 *
7371 * PARAMETERS :
7372 * @landmark_data : input landmark data to be converted
7373 * @landmarks : int32_t destination array
7374 *
7375 *
7376 *==========================================================================*/
7377void QCamera3HardwareInterface::convertLandmarks(
7378 cam_face_landmarks_info_t landmark_data,
7379 int32_t *landmarks)
7380{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007381 if (landmark_data.is_left_eye_valid) {
7382 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7383 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7384 } else {
7385 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7386 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7387 }
7388
7389 if (landmark_data.is_right_eye_valid) {
7390 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7391 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7392 } else {
7393 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7394 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7395 }
7396
7397 if (landmark_data.is_mouth_valid) {
7398 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7399 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7400 } else {
7401 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7402 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7403 }
7404}
7405
7406/*===========================================================================
7407 * FUNCTION : setInvalidLandmarks
7408 *
7409 * DESCRIPTION: helper method to set invalid landmarks
7410 *
7411 * PARAMETERS :
7412 * @landmarks : int32_t destination array
7413 *
7414 *
7415 *==========================================================================*/
7416void QCamera3HardwareInterface::setInvalidLandmarks(
7417 int32_t *landmarks)
7418{
7419 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7420 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7421 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7422 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7423 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7424 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007425}
7426
7427#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007428
7429/*===========================================================================
7430 * FUNCTION : getCapabilities
7431 *
7432 * DESCRIPTION: query camera capability from back-end
7433 *
7434 * PARAMETERS :
7435 * @ops : mm-interface ops structure
7436 * @cam_handle : camera handle for which we need capability
7437 *
7438 * RETURN : ptr type of capability structure
7439 * capability for success
7440 * NULL for failure
7441 *==========================================================================*/
7442cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
7443 uint32_t cam_handle)
7444{
7445 int rc = NO_ERROR;
7446 QCamera3HeapMemory *capabilityHeap = NULL;
7447 cam_capability_t *cap_ptr = NULL;
7448
7449 if (ops == NULL) {
7450 LOGE("Invalid arguments");
7451 return NULL;
7452 }
7453
7454 capabilityHeap = new QCamera3HeapMemory(1);
7455 if (capabilityHeap == NULL) {
7456 LOGE("creation of capabilityHeap failed");
7457 return NULL;
7458 }
7459
7460 /* Allocate memory for capability buffer */
7461 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
7462 if(rc != OK) {
7463 LOGE("No memory for cappability");
7464 goto allocate_failed;
7465 }
7466
7467 /* Map memory for capability buffer */
7468 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
7469
7470 rc = ops->map_buf(cam_handle,
7471 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
7472 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
7473 if(rc < 0) {
7474 LOGE("failed to map capability buffer");
7475 rc = FAILED_TRANSACTION;
7476 goto map_failed;
7477 }
7478
7479 /* Query Capability */
7480 rc = ops->query_capability(cam_handle);
7481 if(rc < 0) {
7482 LOGE("failed to query capability");
7483 rc = FAILED_TRANSACTION;
7484 goto query_failed;
7485 }
7486
7487 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
7488 if (cap_ptr == NULL) {
7489 LOGE("out of memory");
7490 rc = NO_MEMORY;
7491 goto query_failed;
7492 }
7493
7494 memset(cap_ptr, 0, sizeof(cam_capability_t));
7495 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
7496
7497 int index;
7498 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
7499 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
7500 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
7501 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
7502 }
7503
7504query_failed:
7505 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
7506map_failed:
7507 capabilityHeap->deallocate();
7508allocate_failed:
7509 delete capabilityHeap;
7510
7511 if (rc != NO_ERROR) {
7512 return NULL;
7513 } else {
7514 return cap_ptr;
7515 }
7516}
7517
Thierry Strudel3d639192016-09-09 11:52:26 -07007518/*===========================================================================
7519 * FUNCTION : initCapabilities
7520 *
7521 * DESCRIPTION: initialize camera capabilities in static data struct
7522 *
7523 * PARAMETERS :
7524 * @cameraId : camera Id
7525 *
7526 * RETURN : int32_t type of status
7527 * NO_ERROR -- success
7528 * none-zero failure code
7529 *==========================================================================*/
7530int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7531{
7532 int rc = 0;
7533 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007534 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007535
7536 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7537 if (rc) {
7538 LOGE("camera_open failed. rc = %d", rc);
7539 goto open_failed;
7540 }
7541 if (!cameraHandle) {
7542 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7543 goto open_failed;
7544 }
7545
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007546 handle = get_main_camera_handle(cameraHandle->camera_handle);
7547 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7548 if (gCamCapability[cameraId] == NULL) {
7549 rc = FAILED_TRANSACTION;
7550 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007551 }
7552
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007553 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007554 if (is_dual_camera_by_idx(cameraId)) {
7555 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7556 gCamCapability[cameraId]->aux_cam_cap =
7557 getCapabilities(cameraHandle->ops, handle);
7558 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7559 rc = FAILED_TRANSACTION;
7560 free(gCamCapability[cameraId]);
7561 goto failed_op;
7562 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007563
7564 // Copy the main camera capability to main_cam_cap struct
7565 gCamCapability[cameraId]->main_cam_cap =
7566 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7567 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7568 LOGE("out of memory");
7569 rc = NO_MEMORY;
7570 goto failed_op;
7571 }
7572 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7573 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007574 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007575failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007576 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7577 cameraHandle = NULL;
7578open_failed:
7579 return rc;
7580}
7581
7582/*==========================================================================
7583 * FUNCTION : get3Aversion
7584 *
7585 * DESCRIPTION: get the Q3A S/W version
7586 *
7587 * PARAMETERS :
7588 * @sw_version: Reference of Q3A structure which will hold version info upon
7589 * return
7590 *
7591 * RETURN : None
7592 *
7593 *==========================================================================*/
7594void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7595{
7596 if(gCamCapability[mCameraId])
7597 sw_version = gCamCapability[mCameraId]->q3a_version;
7598 else
7599 LOGE("Capability structure NULL!");
7600}
7601
7602
7603/*===========================================================================
7604 * FUNCTION : initParameters
7605 *
7606 * DESCRIPTION: initialize camera parameters
7607 *
7608 * PARAMETERS :
7609 *
7610 * RETURN : int32_t type of status
7611 * NO_ERROR -- success
7612 * none-zero failure code
7613 *==========================================================================*/
7614int QCamera3HardwareInterface::initParameters()
7615{
7616 int rc = 0;
7617
7618 //Allocate Set Param Buffer
7619 mParamHeap = new QCamera3HeapMemory(1);
7620 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7621 if(rc != OK) {
7622 rc = NO_MEMORY;
7623 LOGE("Failed to allocate SETPARM Heap memory");
7624 delete mParamHeap;
7625 mParamHeap = NULL;
7626 return rc;
7627 }
7628
7629 //Map memory for parameters buffer
7630 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7631 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7632 mParamHeap->getFd(0),
7633 sizeof(metadata_buffer_t),
7634 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7635 if(rc < 0) {
7636 LOGE("failed to map SETPARM buffer");
7637 rc = FAILED_TRANSACTION;
7638 mParamHeap->deallocate();
7639 delete mParamHeap;
7640 mParamHeap = NULL;
7641 return rc;
7642 }
7643
7644 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7645
7646 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7647 return rc;
7648}
7649
7650/*===========================================================================
7651 * FUNCTION : deinitParameters
7652 *
7653 * DESCRIPTION: de-initialize camera parameters
7654 *
7655 * PARAMETERS :
7656 *
7657 * RETURN : NONE
7658 *==========================================================================*/
7659void QCamera3HardwareInterface::deinitParameters()
7660{
7661 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
7662 CAM_MAPPING_BUF_TYPE_PARM_BUF);
7663
7664 mParamHeap->deallocate();
7665 delete mParamHeap;
7666 mParamHeap = NULL;
7667
7668 mParameters = NULL;
7669
7670 free(mPrevParameters);
7671 mPrevParameters = NULL;
7672}
7673
7674/*===========================================================================
7675 * FUNCTION : calcMaxJpegSize
7676 *
7677 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7678 *
7679 * PARAMETERS :
7680 *
7681 * RETURN : max_jpeg_size
7682 *==========================================================================*/
7683size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7684{
7685 size_t max_jpeg_size = 0;
7686 size_t temp_width, temp_height;
7687 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7688 MAX_SIZES_CNT);
7689 for (size_t i = 0; i < count; i++) {
7690 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7691 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7692 if (temp_width * temp_height > max_jpeg_size ) {
7693 max_jpeg_size = temp_width * temp_height;
7694 }
7695 }
7696 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7697 return max_jpeg_size;
7698}
7699
7700/*===========================================================================
7701 * FUNCTION : getMaxRawSize
7702 *
7703 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7704 *
7705 * PARAMETERS :
7706 *
7707 * RETURN : Largest supported Raw Dimension
7708 *==========================================================================*/
7709cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7710{
7711 int max_width = 0;
7712 cam_dimension_t maxRawSize;
7713
7714 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7715 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7716 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7717 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7718 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7719 }
7720 }
7721 return maxRawSize;
7722}
7723
7724
7725/*===========================================================================
7726 * FUNCTION : calcMaxJpegDim
7727 *
7728 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7729 *
7730 * PARAMETERS :
7731 *
7732 * RETURN : max_jpeg_dim
7733 *==========================================================================*/
7734cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7735{
7736 cam_dimension_t max_jpeg_dim;
7737 cam_dimension_t curr_jpeg_dim;
7738 max_jpeg_dim.width = 0;
7739 max_jpeg_dim.height = 0;
7740 curr_jpeg_dim.width = 0;
7741 curr_jpeg_dim.height = 0;
7742 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7743 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7744 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7745 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7746 max_jpeg_dim.width * max_jpeg_dim.height ) {
7747 max_jpeg_dim.width = curr_jpeg_dim.width;
7748 max_jpeg_dim.height = curr_jpeg_dim.height;
7749 }
7750 }
7751 return max_jpeg_dim;
7752}
7753
7754/*===========================================================================
7755 * FUNCTION : addStreamConfig
7756 *
7757 * DESCRIPTION: adds the stream configuration to the array
7758 *
7759 * PARAMETERS :
7760 * @available_stream_configs : pointer to stream configuration array
7761 * @scalar_format : scalar format
7762 * @dim : configuration dimension
7763 * @config_type : input or output configuration type
7764 *
7765 * RETURN : NONE
7766 *==========================================================================*/
7767void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7768 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7769{
7770 available_stream_configs.add(scalar_format);
7771 available_stream_configs.add(dim.width);
7772 available_stream_configs.add(dim.height);
7773 available_stream_configs.add(config_type);
7774}
7775
7776/*===========================================================================
7777 * FUNCTION : suppportBurstCapture
7778 *
7779 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7780 *
7781 * PARAMETERS :
7782 * @cameraId : camera Id
7783 *
7784 * RETURN : true if camera supports BURST_CAPTURE
7785 * false otherwise
7786 *==========================================================================*/
7787bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7788{
7789 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7790 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7791 const int32_t highResWidth = 3264;
7792 const int32_t highResHeight = 2448;
7793
7794 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7795 // Maximum resolution images cannot be captured at >= 10fps
7796 // -> not supporting BURST_CAPTURE
7797 return false;
7798 }
7799
7800 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7801 // Maximum resolution images can be captured at >= 20fps
7802 // --> supporting BURST_CAPTURE
7803 return true;
7804 }
7805
7806 // Find the smallest highRes resolution, or largest resolution if there is none
7807 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7808 MAX_SIZES_CNT);
7809 size_t highRes = 0;
7810 while ((highRes + 1 < totalCnt) &&
7811 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7812 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7813 highResWidth * highResHeight)) {
7814 highRes++;
7815 }
7816 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7817 return true;
7818 } else {
7819 return false;
7820 }
7821}
7822
7823/*===========================================================================
7824 * FUNCTION : initStaticMetadata
7825 *
7826 * DESCRIPTION: initialize the static metadata
7827 *
7828 * PARAMETERS :
7829 * @cameraId : camera Id
7830 *
7831 * RETURN : int32_t type of status
7832 * 0 -- success
7833 * non-zero failure code
7834 *==========================================================================*/
7835int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7836{
7837 int rc = 0;
7838 CameraMetadata staticInfo;
7839 size_t count = 0;
7840 bool limitedDevice = false;
7841 char prop[PROPERTY_VALUE_MAX];
7842 bool supportBurst = false;
7843
7844 supportBurst = supportBurstCapture(cameraId);
7845
7846 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7847 * guaranteed or if min fps of max resolution is less than 20 fps, its
7848 * advertised as limited device*/
7849 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7850 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7851 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7852 !supportBurst;
7853
7854 uint8_t supportedHwLvl = limitedDevice ?
7855 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007856#ifndef USE_HAL_3_3
7857 // LEVEL_3 - This device will support level 3.
7858 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7859#else
Thierry Strudel3d639192016-09-09 11:52:26 -07007860 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007861#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007862
7863 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7864 &supportedHwLvl, 1);
7865
7866 bool facingBack = false;
7867 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
7868 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
7869 facingBack = true;
7870 }
7871 /*HAL 3 only*/
7872 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7873 &gCamCapability[cameraId]->min_focus_distance, 1);
7874
7875 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7876 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7877
7878 /*should be using focal lengths but sensor doesn't provide that info now*/
7879 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7880 &gCamCapability[cameraId]->focal_length,
7881 1);
7882
7883 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7884 gCamCapability[cameraId]->apertures,
7885 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7886
7887 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7888 gCamCapability[cameraId]->filter_densities,
7889 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7890
7891
7892 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7893 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7894 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7895
7896 int32_t lens_shading_map_size[] = {
7897 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7898 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7899 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7900 lens_shading_map_size,
7901 sizeof(lens_shading_map_size)/sizeof(int32_t));
7902
7903 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7904 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7905
7906 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7907 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7908
7909 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7910 &gCamCapability[cameraId]->max_frame_duration, 1);
7911
7912 camera_metadata_rational baseGainFactor = {
7913 gCamCapability[cameraId]->base_gain_factor.numerator,
7914 gCamCapability[cameraId]->base_gain_factor.denominator};
7915 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7916 &baseGainFactor, 1);
7917
7918 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7919 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7920
7921 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7922 gCamCapability[cameraId]->pixel_array_size.height};
7923 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7924 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7925
7926 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7927 gCamCapability[cameraId]->active_array_size.top,
7928 gCamCapability[cameraId]->active_array_size.width,
7929 gCamCapability[cameraId]->active_array_size.height};
7930 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7931 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7932
7933 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7934 &gCamCapability[cameraId]->white_level, 1);
7935
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007936 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
7937 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
7938 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007939 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007940 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07007941
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007942#ifndef USE_HAL_3_3
7943 bool hasBlackRegions = false;
7944 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7945 LOGW("black_region_count: %d is bounded to %d",
7946 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7947 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7948 }
7949 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7950 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7951 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7952 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7953 }
7954 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7955 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7956 hasBlackRegions = true;
7957 }
7958#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007959 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7960 &gCamCapability[cameraId]->flash_charge_duration, 1);
7961
7962 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7963 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7964
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007965 // SOF timestamp is based on monotonic_boottime. So advertize REALTIME timesource
7966 // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
7967 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7968 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007969 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7970 &timestampSource, 1);
7971
7972 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7973 &gCamCapability[cameraId]->histogram_size, 1);
7974
7975 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7976 &gCamCapability[cameraId]->max_histogram_count, 1);
7977
7978 int32_t sharpness_map_size[] = {
7979 gCamCapability[cameraId]->sharpness_map_size.width,
7980 gCamCapability[cameraId]->sharpness_map_size.height};
7981
7982 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7983 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7984
7985 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7986 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7987
7988 int32_t scalar_formats[] = {
7989 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7990 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7991 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7992 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7993 HAL_PIXEL_FORMAT_RAW10,
7994 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7995 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7996 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7997 scalar_formats,
7998 scalar_formats_count);
7999
8000 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8001 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8002 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8003 count, MAX_SIZES_CNT, available_processed_sizes);
8004 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8005 available_processed_sizes, count * 2);
8006
8007 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8008 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8009 makeTable(gCamCapability[cameraId]->raw_dim,
8010 count, MAX_SIZES_CNT, available_raw_sizes);
8011 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8012 available_raw_sizes, count * 2);
8013
8014 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8015 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8016 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8017 count, MAX_SIZES_CNT, available_fps_ranges);
8018 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8019 available_fps_ranges, count * 2);
8020
8021 camera_metadata_rational exposureCompensationStep = {
8022 gCamCapability[cameraId]->exp_compensation_step.numerator,
8023 gCamCapability[cameraId]->exp_compensation_step.denominator};
8024 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8025 &exposureCompensationStep, 1);
8026
8027 Vector<uint8_t> availableVstabModes;
8028 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8029 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008030 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008031 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008032 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008033 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008034 count = IS_TYPE_MAX;
8035 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8036 for (size_t i = 0; i < count; i++) {
8037 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8038 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8039 eisSupported = true;
8040 break;
8041 }
8042 }
8043 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008044 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8045 }
8046 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8047 availableVstabModes.array(), availableVstabModes.size());
8048
8049 /*HAL 1 and HAL 3 common*/
8050 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8051 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8052 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8053 float maxZoom = maxZoomStep/minZoomStep;
8054 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8055 &maxZoom, 1);
8056
8057 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8058 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8059
8060 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8061 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8062 max3aRegions[2] = 0; /* AF not supported */
8063 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8064 max3aRegions, 3);
8065
8066 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8067 memset(prop, 0, sizeof(prop));
8068 property_get("persist.camera.facedetect", prop, "1");
8069 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8070 LOGD("Support face detection mode: %d",
8071 supportedFaceDetectMode);
8072
8073 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008074 /* support mode should be OFF if max number of face is 0 */
8075 if (maxFaces <= 0) {
8076 supportedFaceDetectMode = 0;
8077 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008078 Vector<uint8_t> availableFaceDetectModes;
8079 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8080 if (supportedFaceDetectMode == 1) {
8081 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8082 } else if (supportedFaceDetectMode == 2) {
8083 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8084 } else if (supportedFaceDetectMode == 3) {
8085 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8086 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8087 } else {
8088 maxFaces = 0;
8089 }
8090 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8091 availableFaceDetectModes.array(),
8092 availableFaceDetectModes.size());
8093 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8094 (int32_t *)&maxFaces, 1);
8095
8096 int32_t exposureCompensationRange[] = {
8097 gCamCapability[cameraId]->exposure_compensation_min,
8098 gCamCapability[cameraId]->exposure_compensation_max};
8099 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8100 exposureCompensationRange,
8101 sizeof(exposureCompensationRange)/sizeof(int32_t));
8102
8103 uint8_t lensFacing = (facingBack) ?
8104 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8105 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8106
8107 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8108 available_thumbnail_sizes,
8109 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8110
8111 /*all sizes will be clubbed into this tag*/
8112 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8113 /*android.scaler.availableStreamConfigurations*/
8114 Vector<int32_t> available_stream_configs;
8115 cam_dimension_t active_array_dim;
8116 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8117 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8118 /* Add input/output stream configurations for each scalar formats*/
8119 for (size_t j = 0; j < scalar_formats_count; j++) {
8120 switch (scalar_formats[j]) {
8121 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8122 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8123 case HAL_PIXEL_FORMAT_RAW10:
8124 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8125 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8126 addStreamConfig(available_stream_configs, scalar_formats[j],
8127 gCamCapability[cameraId]->raw_dim[i],
8128 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8129 }
8130 break;
8131 case HAL_PIXEL_FORMAT_BLOB:
8132 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8133 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8134 addStreamConfig(available_stream_configs, scalar_formats[j],
8135 gCamCapability[cameraId]->picture_sizes_tbl[i],
8136 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8137 }
8138 break;
8139 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8140 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8141 default:
8142 cam_dimension_t largest_picture_size;
8143 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8144 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8145 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8146 addStreamConfig(available_stream_configs, scalar_formats[j],
8147 gCamCapability[cameraId]->picture_sizes_tbl[i],
8148 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8149 /* Book keep largest */
8150 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8151 >= largest_picture_size.width &&
8152 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8153 >= largest_picture_size.height)
8154 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8155 }
8156 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
8157 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8158 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8159 addStreamConfig(available_stream_configs, scalar_formats[j],
8160 largest_picture_size,
8161 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8162 }
8163 break;
8164 }
8165 }
8166
8167 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8168 available_stream_configs.array(), available_stream_configs.size());
8169 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8170 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8171
8172 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8173 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8174
8175 /* android.scaler.availableMinFrameDurations */
8176 Vector<int64_t> available_min_durations;
8177 for (size_t j = 0; j < scalar_formats_count; j++) {
8178 switch (scalar_formats[j]) {
8179 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8180 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8181 case HAL_PIXEL_FORMAT_RAW10:
8182 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8183 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8184 available_min_durations.add(scalar_formats[j]);
8185 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8186 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8187 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8188 }
8189 break;
8190 default:
8191 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8192 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8193 available_min_durations.add(scalar_formats[j]);
8194 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8195 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8196 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8197 }
8198 break;
8199 }
8200 }
8201 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8202 available_min_durations.array(), available_min_durations.size());
8203
8204 Vector<int32_t> available_hfr_configs;
8205 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8206 int32_t fps = 0;
8207 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8208 case CAM_HFR_MODE_60FPS:
8209 fps = 60;
8210 break;
8211 case CAM_HFR_MODE_90FPS:
8212 fps = 90;
8213 break;
8214 case CAM_HFR_MODE_120FPS:
8215 fps = 120;
8216 break;
8217 case CAM_HFR_MODE_150FPS:
8218 fps = 150;
8219 break;
8220 case CAM_HFR_MODE_180FPS:
8221 fps = 180;
8222 break;
8223 case CAM_HFR_MODE_210FPS:
8224 fps = 210;
8225 break;
8226 case CAM_HFR_MODE_240FPS:
8227 fps = 240;
8228 break;
8229 case CAM_HFR_MODE_480FPS:
8230 fps = 480;
8231 break;
8232 case CAM_HFR_MODE_OFF:
8233 case CAM_HFR_MODE_MAX:
8234 default:
8235 break;
8236 }
8237
8238 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8239 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8240 /* For each HFR frame rate, need to advertise one variable fps range
8241 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8242 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8243 * set by the app. When video recording is started, [120, 120] is
8244 * set. This way sensor configuration does not change when recording
8245 * is started */
8246
8247 /* (width, height, fps_min, fps_max, batch_size_max) */
8248 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8249 j < MAX_SIZES_CNT; j++) {
8250 available_hfr_configs.add(
8251 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8252 available_hfr_configs.add(
8253 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8254 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8255 available_hfr_configs.add(fps);
8256 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8257
8258 /* (width, height, fps_min, fps_max, batch_size_max) */
8259 available_hfr_configs.add(
8260 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8261 available_hfr_configs.add(
8262 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8263 available_hfr_configs.add(fps);
8264 available_hfr_configs.add(fps);
8265 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8266 }
8267 }
8268 }
8269 //Advertise HFR capability only if the property is set
8270 memset(prop, 0, sizeof(prop));
8271 property_get("persist.camera.hal3hfr.enable", prop, "1");
8272 uint8_t hfrEnable = (uint8_t)atoi(prop);
8273
8274 if(hfrEnable && available_hfr_configs.array()) {
8275 staticInfo.update(
8276 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8277 available_hfr_configs.array(), available_hfr_configs.size());
8278 }
8279
8280 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8281 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8282 &max_jpeg_size, 1);
8283
8284 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8285 size_t size = 0;
8286 count = CAM_EFFECT_MODE_MAX;
8287 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8288 for (size_t i = 0; i < count; i++) {
8289 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8290 gCamCapability[cameraId]->supported_effects[i]);
8291 if (NAME_NOT_FOUND != val) {
8292 avail_effects[size] = (uint8_t)val;
8293 size++;
8294 }
8295 }
8296 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8297 avail_effects,
8298 size);
8299
8300 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8301 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8302 size_t supported_scene_modes_cnt = 0;
8303 count = CAM_SCENE_MODE_MAX;
8304 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8305 for (size_t i = 0; i < count; i++) {
8306 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8307 CAM_SCENE_MODE_OFF) {
8308 int val = lookupFwkName(SCENE_MODES_MAP,
8309 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8310 gCamCapability[cameraId]->supported_scene_modes[i]);
8311 if (NAME_NOT_FOUND != val) {
8312 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8313 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8314 supported_scene_modes_cnt++;
8315 }
8316 }
8317 }
8318 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8319 avail_scene_modes,
8320 supported_scene_modes_cnt);
8321
8322 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8323 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8324 supported_scene_modes_cnt,
8325 CAM_SCENE_MODE_MAX,
8326 scene_mode_overrides,
8327 supported_indexes,
8328 cameraId);
8329
8330 if (supported_scene_modes_cnt == 0) {
8331 supported_scene_modes_cnt = 1;
8332 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8333 }
8334
8335 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8336 scene_mode_overrides, supported_scene_modes_cnt * 3);
8337
8338 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8339 ANDROID_CONTROL_MODE_AUTO,
8340 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8341 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8342 available_control_modes,
8343 3);
8344
8345 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8346 size = 0;
8347 count = CAM_ANTIBANDING_MODE_MAX;
8348 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8349 for (size_t i = 0; i < count; i++) {
8350 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8351 gCamCapability[cameraId]->supported_antibandings[i]);
8352 if (NAME_NOT_FOUND != val) {
8353 avail_antibanding_modes[size] = (uint8_t)val;
8354 size++;
8355 }
8356
8357 }
8358 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8359 avail_antibanding_modes,
8360 size);
8361
8362 uint8_t avail_abberation_modes[] = {
8363 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8364 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8365 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8366 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8367 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8368 if (0 == count) {
8369 // If no aberration correction modes are available for a device, this advertise OFF mode
8370 size = 1;
8371 } else {
8372 // If count is not zero then atleast one among the FAST or HIGH quality is supported
8373 // So, advertize all 3 modes if atleast any one mode is supported as per the
8374 // new M requirement
8375 size = 3;
8376 }
8377 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8378 avail_abberation_modes,
8379 size);
8380
8381 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8382 size = 0;
8383 count = CAM_FOCUS_MODE_MAX;
8384 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8385 for (size_t i = 0; i < count; i++) {
8386 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8387 gCamCapability[cameraId]->supported_focus_modes[i]);
8388 if (NAME_NOT_FOUND != val) {
8389 avail_af_modes[size] = (uint8_t)val;
8390 size++;
8391 }
8392 }
8393 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8394 avail_af_modes,
8395 size);
8396
8397 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8398 size = 0;
8399 count = CAM_WB_MODE_MAX;
8400 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8401 for (size_t i = 0; i < count; i++) {
8402 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8403 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8404 gCamCapability[cameraId]->supported_white_balances[i]);
8405 if (NAME_NOT_FOUND != val) {
8406 avail_awb_modes[size] = (uint8_t)val;
8407 size++;
8408 }
8409 }
8410 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8411 avail_awb_modes,
8412 size);
8413
8414 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8415 count = CAM_FLASH_FIRING_LEVEL_MAX;
8416 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8417 count);
8418 for (size_t i = 0; i < count; i++) {
8419 available_flash_levels[i] =
8420 gCamCapability[cameraId]->supported_firing_levels[i];
8421 }
8422 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8423 available_flash_levels, count);
8424
8425 uint8_t flashAvailable;
8426 if (gCamCapability[cameraId]->flash_available)
8427 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8428 else
8429 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8430 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8431 &flashAvailable, 1);
8432
8433 Vector<uint8_t> avail_ae_modes;
8434 count = CAM_AE_MODE_MAX;
8435 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8436 for (size_t i = 0; i < count; i++) {
8437 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8438 }
8439 if (flashAvailable) {
8440 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8441 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8442 }
8443 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8444 avail_ae_modes.array(),
8445 avail_ae_modes.size());
8446
8447 int32_t sensitivity_range[2];
8448 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8449 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8450 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8451 sensitivity_range,
8452 sizeof(sensitivity_range) / sizeof(int32_t));
8453
8454 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8455 &gCamCapability[cameraId]->max_analog_sensitivity,
8456 1);
8457
8458 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8459 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8460 &sensor_orientation,
8461 1);
8462
8463 int32_t max_output_streams[] = {
8464 MAX_STALLING_STREAMS,
8465 MAX_PROCESSED_STREAMS,
8466 MAX_RAW_STREAMS};
8467 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8468 max_output_streams,
8469 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8470
8471 uint8_t avail_leds = 0;
8472 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8473 &avail_leds, 0);
8474
8475 uint8_t focus_dist_calibrated;
8476 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8477 gCamCapability[cameraId]->focus_dist_calibrated);
8478 if (NAME_NOT_FOUND != val) {
8479 focus_dist_calibrated = (uint8_t)val;
8480 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8481 &focus_dist_calibrated, 1);
8482 }
8483
8484 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8485 size = 0;
8486 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8487 MAX_TEST_PATTERN_CNT);
8488 for (size_t i = 0; i < count; i++) {
8489 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8490 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8491 if (NAME_NOT_FOUND != testpatternMode) {
8492 avail_testpattern_modes[size] = testpatternMode;
8493 size++;
8494 }
8495 }
8496 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8497 avail_testpattern_modes,
8498 size);
8499
8500 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8501 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8502 &max_pipeline_depth,
8503 1);
8504
8505 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8506 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8507 &partial_result_count,
8508 1);
8509
8510 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8511 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8512
8513 Vector<uint8_t> available_capabilities;
8514 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8515 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8516 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8517 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8518 if (supportBurst) {
8519 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8520 }
8521 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8522 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8523 if (hfrEnable && available_hfr_configs.array()) {
8524 available_capabilities.add(
8525 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8526 }
8527
8528 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8529 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8530 }
8531 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8532 available_capabilities.array(),
8533 available_capabilities.size());
8534
8535 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8536 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8537 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8538 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8539
8540 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8541 &aeLockAvailable, 1);
8542
8543 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8544 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8545 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8546 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8547
8548 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8549 &awbLockAvailable, 1);
8550
8551 int32_t max_input_streams = 1;
8552 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8553 &max_input_streams,
8554 1);
8555
8556 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8557 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8558 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8559 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8560 HAL_PIXEL_FORMAT_YCbCr_420_888};
8561 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8562 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8563
8564 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8565 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8566 &max_latency,
8567 1);
8568
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008569#ifndef USE_HAL_3_3
8570 int32_t isp_sensitivity_range[2];
8571 isp_sensitivity_range[0] =
8572 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8573 isp_sensitivity_range[1] =
8574 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8575 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8576 isp_sensitivity_range,
8577 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8578#endif
8579
Thierry Strudel3d639192016-09-09 11:52:26 -07008580 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8581 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8582 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8583 available_hot_pixel_modes,
8584 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8585
8586 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8587 ANDROID_SHADING_MODE_FAST,
8588 ANDROID_SHADING_MODE_HIGH_QUALITY};
8589 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8590 available_shading_modes,
8591 3);
8592
8593 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8594 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8595 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8596 available_lens_shading_map_modes,
8597 2);
8598
8599 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8600 ANDROID_EDGE_MODE_FAST,
8601 ANDROID_EDGE_MODE_HIGH_QUALITY,
8602 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8603 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8604 available_edge_modes,
8605 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8606
8607 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8608 ANDROID_NOISE_REDUCTION_MODE_FAST,
8609 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8610 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8611 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8612 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8613 available_noise_red_modes,
8614 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8615
8616 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8617 ANDROID_TONEMAP_MODE_FAST,
8618 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8619 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8620 available_tonemap_modes,
8621 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8622
8623 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8624 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8625 available_hot_pixel_map_modes,
8626 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8627
8628 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8629 gCamCapability[cameraId]->reference_illuminant1);
8630 if (NAME_NOT_FOUND != val) {
8631 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8632 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8633 }
8634
8635 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8636 gCamCapability[cameraId]->reference_illuminant2);
8637 if (NAME_NOT_FOUND != val) {
8638 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8639 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8640 }
8641
8642 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8643 (void *)gCamCapability[cameraId]->forward_matrix1,
8644 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8645
8646 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8647 (void *)gCamCapability[cameraId]->forward_matrix2,
8648 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8649
8650 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8651 (void *)gCamCapability[cameraId]->color_transform1,
8652 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8653
8654 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8655 (void *)gCamCapability[cameraId]->color_transform2,
8656 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8657
8658 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8659 (void *)gCamCapability[cameraId]->calibration_transform1,
8660 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8661
8662 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8663 (void *)gCamCapability[cameraId]->calibration_transform2,
8664 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8665
8666 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8667 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8668 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8669 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8670 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8671 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8672 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8673 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8674 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8675 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8676 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8677 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8678 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8679 ANDROID_JPEG_GPS_COORDINATES,
8680 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8681 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8682 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8683 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8684 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8685 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8686 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8687 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8688 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8689 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008690#ifndef USE_HAL_3_3
8691 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8692#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008693 ANDROID_STATISTICS_FACE_DETECT_MODE,
8694 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8695 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8696 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008697 ANDROID_BLACK_LEVEL_LOCK,
8698 /* DevCamDebug metadata request_keys_basic */
8699 DEVCAMDEBUG_META_ENABLE,
8700 /* DevCamDebug metadata end */
8701 };
Thierry Strudel3d639192016-09-09 11:52:26 -07008702
8703 size_t request_keys_cnt =
8704 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8705 Vector<int32_t> available_request_keys;
8706 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8707 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8708 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8709 }
8710
8711 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8712 available_request_keys.array(), available_request_keys.size());
8713
8714 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8715 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8716 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8717 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8718 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8719 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8720 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8721 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8722 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8723 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8724 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8725 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8726 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8727 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8728 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8729 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8730 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8731 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8732 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8733 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8734 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008735 ANDROID_STATISTICS_FACE_SCORES,
8736#ifndef USE_HAL_3_3
8737 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8738#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07008739 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008740 // DevCamDebug metadata result_keys_basic
8741 DEVCAMDEBUG_META_ENABLE,
8742 // DevCamDebug metadata result_keys AF
8743 DEVCAMDEBUG_AF_LENS_POSITION,
8744 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
8745 DEVCAMDEBUG_AF_TOF_DISTANCE,
8746 DEVCAMDEBUG_AF_LUMA,
8747 DEVCAMDEBUG_AF_HAF_STATE,
8748 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
8749 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
8750 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
8751 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
8752 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
8753 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
8754 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
8755 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
8756 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
8757 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
8758 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
8759 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
8760 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
8761 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
8762 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
8763 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
8764 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
8765 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
8766 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
8767 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
8768 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
8769 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
8770 // DevCamDebug metadata result_keys AEC
8771 DEVCAMDEBUG_AEC_TARGET_LUMA,
8772 DEVCAMDEBUG_AEC_COMP_LUMA,
8773 DEVCAMDEBUG_AEC_AVG_LUMA,
8774 DEVCAMDEBUG_AEC_CUR_LUMA,
8775 DEVCAMDEBUG_AEC_LINECOUNT,
8776 DEVCAMDEBUG_AEC_REAL_GAIN,
8777 DEVCAMDEBUG_AEC_EXP_INDEX,
8778 DEVCAMDEBUG_AEC_LUX_IDX,
8779 // DevCamDebug metadata result_keys AWB
8780 DEVCAMDEBUG_AWB_R_GAIN,
8781 DEVCAMDEBUG_AWB_G_GAIN,
8782 DEVCAMDEBUG_AWB_B_GAIN,
8783 DEVCAMDEBUG_AWB_CCT,
8784 DEVCAMDEBUG_AWB_DECISION,
8785 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008786 };
8787
Thierry Strudel3d639192016-09-09 11:52:26 -07008788 size_t result_keys_cnt =
8789 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8790
8791 Vector<int32_t> available_result_keys;
8792 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8793 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8794 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8795 }
8796 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8797 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8798 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8799 }
8800 if (supportedFaceDetectMode == 1) {
8801 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8802 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8803 } else if ((supportedFaceDetectMode == 2) ||
8804 (supportedFaceDetectMode == 3)) {
8805 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8806 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8807 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008808#ifndef USE_HAL_3_3
8809 if (hasBlackRegions) {
8810 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8811 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8812 }
8813#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008814 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8815 available_result_keys.array(), available_result_keys.size());
8816
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008817 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07008818 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8819 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8820 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8821 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8822 ANDROID_SCALER_CROPPING_TYPE,
8823 ANDROID_SYNC_MAX_LATENCY,
8824 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8825 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8826 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8827 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8828 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8829 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8830 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8831 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8832 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8833 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8834 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8835 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8836 ANDROID_LENS_FACING,
8837 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8838 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8839 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8840 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8841 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8842 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8843 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8844 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8845 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8846 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8847 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8848 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8849 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8850 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8851 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8852 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8853 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8854 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8855 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8856 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8857 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8858 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8859 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8860 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8861 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8862 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8863 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8864 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8865 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8866 ANDROID_CONTROL_AVAILABLE_MODES,
8867 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8868 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8869 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8870 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008871 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8872#ifndef USE_HAL_3_3
8873 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
8874 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8875#endif
8876 };
8877
8878 Vector<int32_t> available_characteristics_keys;
8879 available_characteristics_keys.appendArray(characteristics_keys_basic,
8880 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8881#ifndef USE_HAL_3_3
8882 if (hasBlackRegions) {
8883 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8884 }
8885#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008886 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008887 available_characteristics_keys.array(),
8888 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07008889
8890 /*available stall durations depend on the hw + sw and will be different for different devices */
8891 /*have to add for raw after implementation*/
8892 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8893 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8894
8895 Vector<int64_t> available_stall_durations;
8896 for (uint32_t j = 0; j < stall_formats_count; j++) {
8897 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8898 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8899 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8900 available_stall_durations.add(stall_formats[j]);
8901 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8902 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8903 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8904 }
8905 } else {
8906 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8907 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8908 available_stall_durations.add(stall_formats[j]);
8909 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8910 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8911 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8912 }
8913 }
8914 }
8915 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8916 available_stall_durations.array(),
8917 available_stall_durations.size());
8918
8919 //QCAMERA3_OPAQUE_RAW
8920 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8921 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8922 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8923 case LEGACY_RAW:
8924 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8925 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8926 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8927 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8928 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8929 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8930 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8931 break;
8932 case MIPI_RAW:
8933 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8934 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8935 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8936 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8937 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8938 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8939 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8940 break;
8941 default:
8942 LOGE("unknown opaque_raw_format %d",
8943 gCamCapability[cameraId]->opaque_raw_fmt);
8944 break;
8945 }
8946 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8947
8948 Vector<int32_t> strides;
8949 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8950 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8951 cam_stream_buf_plane_info_t buf_planes;
8952 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8953 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8954 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8955 &gCamCapability[cameraId]->padding_info, &buf_planes);
8956 strides.add(buf_planes.plane_info.mp[0].stride);
8957 }
8958 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8959 strides.size());
8960
Thierry Strudel04e026f2016-10-10 11:27:36 -07008961 //Video HDR default
8962 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8963 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8964 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8965 int32_t vhdr_mode[] = {
8966 QCAMERA3_VIDEO_HDR_MODE_OFF,
8967 QCAMERA3_VIDEO_HDR_MODE_ON};
8968
8969 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8970 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8971 vhdr_mode, vhdr_mode_count);
8972 }
8973
Thierry Strudel3d639192016-09-09 11:52:26 -07008974 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8975 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8976 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8977
8978 uint8_t isMonoOnly =
8979 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8980 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8981 &isMonoOnly, 1);
8982
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008983#ifndef USE_HAL_3_3
8984 Vector<int32_t> opaque_size;
8985 for (size_t j = 0; j < scalar_formats_count; j++) {
8986 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8987 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8988 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8989 cam_stream_buf_plane_info_t buf_planes;
8990
8991 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8992 &gCamCapability[cameraId]->padding_info, &buf_planes);
8993
8994 if (rc == 0) {
8995 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8996 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8997 opaque_size.add(buf_planes.plane_info.frame_len);
8998 }else {
8999 LOGE("raw frame calculation failed!");
9000 }
9001 }
9002 }
9003 }
9004
9005 if ((opaque_size.size() > 0) &&
9006 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9007 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9008 else
9009 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9010#endif
9011
Thierry Strudel04e026f2016-10-10 11:27:36 -07009012 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9013 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9014 size = 0;
9015 count = CAM_IR_MODE_MAX;
9016 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9017 for (size_t i = 0; i < count; i++) {
9018 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9019 gCamCapability[cameraId]->supported_ir_modes[i]);
9020 if (NAME_NOT_FOUND != val) {
9021 avail_ir_modes[size] = (int32_t)val;
9022 size++;
9023 }
9024 }
9025 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9026 avail_ir_modes, size);
9027 }
9028
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009029 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9030 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9031 size = 0;
9032 count = CAM_AEC_CONVERGENCE_MAX;
9033 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9034 for (size_t i = 0; i < count; i++) {
9035 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9036 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9037 if (NAME_NOT_FOUND != val) {
9038 available_instant_aec_modes[size] = (int32_t)val;
9039 size++;
9040 }
9041 }
9042 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9043 available_instant_aec_modes, size);
9044 }
9045
Thierry Strudel3d639192016-09-09 11:52:26 -07009046 gStaticMetadata[cameraId] = staticInfo.release();
9047 return rc;
9048}
9049
9050/*===========================================================================
9051 * FUNCTION : makeTable
9052 *
9053 * DESCRIPTION: make a table of sizes
9054 *
9055 * PARAMETERS :
9056 *
9057 *
9058 *==========================================================================*/
9059void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9060 size_t max_size, int32_t *sizeTable)
9061{
9062 size_t j = 0;
9063 if (size > max_size) {
9064 size = max_size;
9065 }
9066 for (size_t i = 0; i < size; i++) {
9067 sizeTable[j] = dimTable[i].width;
9068 sizeTable[j+1] = dimTable[i].height;
9069 j+=2;
9070 }
9071}
9072
9073/*===========================================================================
9074 * FUNCTION : makeFPSTable
9075 *
9076 * DESCRIPTION: make a table of fps ranges
9077 *
9078 * PARAMETERS :
9079 *
9080 *==========================================================================*/
9081void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9082 size_t max_size, int32_t *fpsRangesTable)
9083{
9084 size_t j = 0;
9085 if (size > max_size) {
9086 size = max_size;
9087 }
9088 for (size_t i = 0; i < size; i++) {
9089 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9090 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9091 j+=2;
9092 }
9093}
9094
9095/*===========================================================================
9096 * FUNCTION : makeOverridesList
9097 *
9098 * DESCRIPTION: make a list of scene mode overrides
9099 *
9100 * PARAMETERS :
9101 *
9102 *
9103 *==========================================================================*/
9104void QCamera3HardwareInterface::makeOverridesList(
9105 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9106 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9107{
9108 /*daemon will give a list of overrides for all scene modes.
9109 However we should send the fwk only the overrides for the scene modes
9110 supported by the framework*/
9111 size_t j = 0;
9112 if (size > max_size) {
9113 size = max_size;
9114 }
9115 size_t focus_count = CAM_FOCUS_MODE_MAX;
9116 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9117 focus_count);
9118 for (size_t i = 0; i < size; i++) {
9119 bool supt = false;
9120 size_t index = supported_indexes[i];
9121 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9122 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9123 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9124 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9125 overridesTable[index].awb_mode);
9126 if (NAME_NOT_FOUND != val) {
9127 overridesList[j+1] = (uint8_t)val;
9128 }
9129 uint8_t focus_override = overridesTable[index].af_mode;
9130 for (size_t k = 0; k < focus_count; k++) {
9131 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9132 supt = true;
9133 break;
9134 }
9135 }
9136 if (supt) {
9137 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9138 focus_override);
9139 if (NAME_NOT_FOUND != val) {
9140 overridesList[j+2] = (uint8_t)val;
9141 }
9142 } else {
9143 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9144 }
9145 j+=3;
9146 }
9147}
9148
9149/*===========================================================================
9150 * FUNCTION : filterJpegSizes
9151 *
9152 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9153 * could be downscaled to
9154 *
9155 * PARAMETERS :
9156 *
9157 * RETURN : length of jpegSizes array
9158 *==========================================================================*/
9159
9160size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9161 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9162 uint8_t downscale_factor)
9163{
9164 if (0 == downscale_factor) {
9165 downscale_factor = 1;
9166 }
9167
9168 int32_t min_width = active_array_size.width / downscale_factor;
9169 int32_t min_height = active_array_size.height / downscale_factor;
9170 size_t jpegSizesCnt = 0;
9171 if (processedSizesCnt > maxCount) {
9172 processedSizesCnt = maxCount;
9173 }
9174 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9175 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9176 jpegSizes[jpegSizesCnt] = processedSizes[i];
9177 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9178 jpegSizesCnt += 2;
9179 }
9180 }
9181 return jpegSizesCnt;
9182}
9183
9184/*===========================================================================
9185 * FUNCTION : computeNoiseModelEntryS
9186 *
9187 * DESCRIPTION: function to map a given sensitivity to the S noise
9188 * model parameters in the DNG noise model.
9189 *
9190 * PARAMETERS : sens : the sensor sensitivity
9191 *
9192 ** RETURN : S (sensor amplification) noise
9193 *
9194 *==========================================================================*/
9195double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9196 double s = gCamCapability[mCameraId]->gradient_S * sens +
9197 gCamCapability[mCameraId]->offset_S;
9198 return ((s < 0.0) ? 0.0 : s);
9199}
9200
9201/*===========================================================================
9202 * FUNCTION : computeNoiseModelEntryO
9203 *
9204 * DESCRIPTION: function to map a given sensitivity to the O noise
9205 * model parameters in the DNG noise model.
9206 *
9207 * PARAMETERS : sens : the sensor sensitivity
9208 *
9209 ** RETURN : O (sensor readout) noise
9210 *
9211 *==========================================================================*/
9212double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9213 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9214 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9215 1.0 : (1.0 * sens / max_analog_sens);
9216 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9217 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9218 return ((o < 0.0) ? 0.0 : o);
9219}
9220
9221/*===========================================================================
9222 * FUNCTION : getSensorSensitivity
9223 *
9224 * DESCRIPTION: convert iso_mode to an integer value
9225 *
9226 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9227 *
9228 ** RETURN : sensitivity supported by sensor
9229 *
9230 *==========================================================================*/
9231int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9232{
9233 int32_t sensitivity;
9234
9235 switch (iso_mode) {
9236 case CAM_ISO_MODE_100:
9237 sensitivity = 100;
9238 break;
9239 case CAM_ISO_MODE_200:
9240 sensitivity = 200;
9241 break;
9242 case CAM_ISO_MODE_400:
9243 sensitivity = 400;
9244 break;
9245 case CAM_ISO_MODE_800:
9246 sensitivity = 800;
9247 break;
9248 case CAM_ISO_MODE_1600:
9249 sensitivity = 1600;
9250 break;
9251 default:
9252 sensitivity = -1;
9253 break;
9254 }
9255 return sensitivity;
9256}
9257
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities
 *
 * PARAMETERS :
 *   @cameraId  : camera Id
 *   @info      : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock serializes lazy initialization of the per-camera capability
    // and static-metadata caches across concurrent getCamInfo callers.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        // First query for this camera: populate the capability cache.
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        // First query for this camera: build the static characteristics.
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map HAL sensor position (including aux sensors of dual-camera
    // setups) to the framework's two-valued facing field.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: flag the error but still fill in the rest of
        // the info struct below; rc carries the failure to the caller.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Throughput estimate: worst case of all processed streams running at
    // full active-array resolution at the highest advertised fps.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9345
9346/*===========================================================================
9347 * FUNCTION : translateCapabilityToMetadata
9348 *
9349 * DESCRIPTION: translate the capability into camera_metadata_t
9350 *
9351 * PARAMETERS : type of the request
9352 *
9353 *
9354 * RETURN : success: camera_metadata_t*
9355 * failure: NULL
9356 *
9357 *==========================================================================*/
9358camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9359{
9360 if (mDefaultMetadata[type] != NULL) {
9361 return mDefaultMetadata[type];
9362 }
9363 //first time we are handling this request
9364 //fill up the metadata structure using the wrapper class
9365 CameraMetadata settings;
9366 //translate from cam_capability_t to camera_metadata_tag_t
9367 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9368 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9369 int32_t defaultRequestID = 0;
9370 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9371
9372 /* OIS disable */
9373 char ois_prop[PROPERTY_VALUE_MAX];
9374 memset(ois_prop, 0, sizeof(ois_prop));
9375 property_get("persist.camera.ois.disable", ois_prop, "0");
9376 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9377
9378 /* Force video to use OIS */
9379 char videoOisProp[PROPERTY_VALUE_MAX];
9380 memset(videoOisProp, 0, sizeof(videoOisProp));
9381 property_get("persist.camera.ois.video", videoOisProp, "1");
9382 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009383
9384 // Hybrid AE enable/disable
9385 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9386 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9387 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9388 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9389
Thierry Strudel3d639192016-09-09 11:52:26 -07009390 uint8_t controlIntent = 0;
9391 uint8_t focusMode;
9392 uint8_t vsMode;
9393 uint8_t optStabMode;
9394 uint8_t cacMode;
9395 uint8_t edge_mode;
9396 uint8_t noise_red_mode;
9397 uint8_t tonemap_mode;
9398 bool highQualityModeEntryAvailable = FALSE;
9399 bool fastModeEntryAvailable = FALSE;
9400 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9401 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9402 switch (type) {
9403 case CAMERA3_TEMPLATE_PREVIEW:
9404 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9405 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9406 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9407 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9408 edge_mode = ANDROID_EDGE_MODE_FAST;
9409 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9410 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9411 break;
9412 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9413 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9414 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9415 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9416 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9417 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9418 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9419 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9420 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9421 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9422 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9423 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9424 highQualityModeEntryAvailable = TRUE;
9425 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9426 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9427 fastModeEntryAvailable = TRUE;
9428 }
9429 }
9430 if (highQualityModeEntryAvailable) {
9431 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9432 } else if (fastModeEntryAvailable) {
9433 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9434 }
9435 break;
9436 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9437 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9438 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9439 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009440 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9441 edge_mode = ANDROID_EDGE_MODE_FAST;
9442 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9443 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9444 if (forceVideoOis)
9445 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9446 break;
9447 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9448 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9449 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9450 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009451 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9452 edge_mode = ANDROID_EDGE_MODE_FAST;
9453 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9454 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9455 if (forceVideoOis)
9456 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9457 break;
9458 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9459 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9460 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9461 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9462 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9463 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9464 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9465 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9466 break;
9467 case CAMERA3_TEMPLATE_MANUAL:
9468 edge_mode = ANDROID_EDGE_MODE_FAST;
9469 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9470 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9471 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9472 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9473 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9474 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9475 break;
9476 default:
9477 edge_mode = ANDROID_EDGE_MODE_FAST;
9478 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9479 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9480 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9481 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9482 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9483 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9484 break;
9485 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009486 // Set CAC to OFF if underlying device doesn't support
9487 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9488 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9489 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009490 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9491 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9492 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9493 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9494 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9495 }
9496 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9497
9498 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9499 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9500 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9501 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9502 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9503 || ois_disable)
9504 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9505 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9506
9507 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9508 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9509
9510 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9511 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9512
9513 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9514 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9515
9516 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9517 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9518
9519 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9520 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9521
9522 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9523 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9524
9525 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9526 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9527
9528 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9529 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9530
9531 /*flash*/
9532 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9533 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9534
9535 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9536 settings.update(ANDROID_FLASH_FIRING_POWER,
9537 &flashFiringLevel, 1);
9538
9539 /* lens */
9540 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9541 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9542
9543 if (gCamCapability[mCameraId]->filter_densities_count) {
9544 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9545 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9546 gCamCapability[mCameraId]->filter_densities_count);
9547 }
9548
9549 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9550 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9551
9552 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9553 float default_focus_distance = 0;
9554 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9555 }
9556
9557 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9558 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9559
9560 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9561 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9562
9563 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9564 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9565
9566 /* face detection (default to OFF) */
9567 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9568 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9569
9570 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9571 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9572
9573 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9574 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9575
9576 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9577 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9578
9579 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9580 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9581
9582 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9583 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9584
9585 /* Exposure time(Update the Min Exposure Time)*/
9586 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9587 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9588
9589 /* frame duration */
9590 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9591 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9592
9593 /* sensitivity */
9594 static const int32_t default_sensitivity = 100;
9595 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009596#ifndef USE_HAL_3_3
9597 static const int32_t default_isp_sensitivity =
9598 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9599 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9600#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009601
9602 /*edge mode*/
9603 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9604
9605 /*noise reduction mode*/
9606 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9607
9608 /*color correction mode*/
9609 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9610 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9611
9612 /*transform matrix mode*/
9613 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9614
9615 int32_t scaler_crop_region[4];
9616 scaler_crop_region[0] = 0;
9617 scaler_crop_region[1] = 0;
9618 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9619 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9620 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9621
9622 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9623 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9624
9625 /*focus distance*/
9626 float focus_distance = 0.0;
9627 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9628
9629 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009630 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009631 float max_range = 0.0;
9632 float max_fixed_fps = 0.0;
9633 int32_t fps_range[2] = {0, 0};
9634 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9635 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009636 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9637 TEMPLATE_MAX_PREVIEW_FPS) {
9638 continue;
9639 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009640 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9641 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9642 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9643 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9644 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9645 if (range > max_range) {
9646 fps_range[0] =
9647 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9648 fps_range[1] =
9649 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9650 max_range = range;
9651 }
9652 } else {
9653 if (range < 0.01 && max_fixed_fps <
9654 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9655 fps_range[0] =
9656 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9657 fps_range[1] =
9658 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9659 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9660 }
9661 }
9662 }
9663 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9664
9665 /*precapture trigger*/
9666 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9667 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9668
9669 /*af trigger*/
9670 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9671 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9672
9673 /* ae & af regions */
9674 int32_t active_region[] = {
9675 gCamCapability[mCameraId]->active_array_size.left,
9676 gCamCapability[mCameraId]->active_array_size.top,
9677 gCamCapability[mCameraId]->active_array_size.left +
9678 gCamCapability[mCameraId]->active_array_size.width,
9679 gCamCapability[mCameraId]->active_array_size.top +
9680 gCamCapability[mCameraId]->active_array_size.height,
9681 0};
9682 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9683 sizeof(active_region) / sizeof(active_region[0]));
9684 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9685 sizeof(active_region) / sizeof(active_region[0]));
9686
9687 /* black level lock */
9688 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9689 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9690
9691 /* lens shading map mode */
9692 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9693 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9694 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9695 }
9696 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9697
9698 //special defaults for manual template
9699 if (type == CAMERA3_TEMPLATE_MANUAL) {
9700 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9701 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9702
9703 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9704 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9705
9706 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9707 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9708
9709 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9710 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9711
9712 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9713 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9714
9715 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9716 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9717 }
9718
9719
9720 /* TNR
9721 * We'll use this location to determine which modes TNR will be set.
9722 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9723 * This is not to be confused with linking on a per stream basis that decision
9724 * is still on per-session basis and will be handled as part of config stream
9725 */
9726 uint8_t tnr_enable = 0;
9727
9728 if (m_bTnrPreview || m_bTnrVideo) {
9729
9730 switch (type) {
9731 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9732 tnr_enable = 1;
9733 break;
9734
9735 default:
9736 tnr_enable = 0;
9737 break;
9738 }
9739
9740 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9741 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9742 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9743
9744 LOGD("TNR:%d with process plate %d for template:%d",
9745 tnr_enable, tnr_process_type, type);
9746 }
9747
9748 //Update Link tags to default
9749 int32_t sync_type = CAM_TYPE_STANDALONE;
9750 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9751
9752 int32_t is_main = 0; //this doesn't matter as app should overwrite
9753 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9754
9755 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
9756
9757 /* CDS default */
9758 char prop[PROPERTY_VALUE_MAX];
9759 memset(prop, 0, sizeof(prop));
9760 property_get("persist.camera.CDS", prop, "Auto");
9761 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9762 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9763 if (CAM_CDS_MODE_MAX == cds_mode) {
9764 cds_mode = CAM_CDS_MODE_AUTO;
9765 }
9766
9767 /* Disabling CDS in templates which have TNR enabled*/
9768 if (tnr_enable)
9769 cds_mode = CAM_CDS_MODE_OFF;
9770
9771 int32_t mode = cds_mode;
9772 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07009773
9774 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
9775 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
9776
9777 /* IR Mode Default Off */
9778 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
9779 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
9780
Thierry Strudel269c81a2016-10-12 12:13:59 -07009781 /* Manual Convergence AEC Speed is disabled by default*/
9782 float default_aec_speed = 0;
9783 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
9784
9785 /* Manual Convergence AWB Speed is disabled by default*/
9786 float default_awb_speed = 0;
9787 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
9788
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009789 // Set instant AEC to normal convergence by default
9790 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
9791 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
9792
Shuzhen Wang19463d72016-03-08 11:09:52 -08009793 /* hybrid ae */
9794 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9795
Thierry Strudel3d639192016-09-09 11:52:26 -07009796 mDefaultMetadata[type] = settings.release();
9797
9798 return mDefaultMetadata[type];
9799}
9800
9801/*===========================================================================
9802 * FUNCTION : setFrameParameters
9803 *
9804 * DESCRIPTION: set parameters per frame as requested in the metadata from
9805 * framework
9806 *
9807 * PARAMETERS :
9808 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009809 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -07009810 * @blob_request: Whether this request is a blob request or not
9811 *
9812 * RETURN : success: NO_ERROR
9813 * failure:
9814 *==========================================================================*/
9815int QCamera3HardwareInterface::setFrameParameters(
9816 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009817 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -07009818 int blob_request,
9819 uint32_t snapshotStreamId)
9820{
9821 /*translate from camera_metadata_t type to parm_type_t*/
9822 int rc = 0;
9823 int32_t hal_version = CAM_HAL_V3;
9824
9825 clear_metadata_buffer(mParameters);
9826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
9827 LOGE("Failed to set hal version in the parameters");
9828 return BAD_VALUE;
9829 }
9830
9831 /*we need to update the frame number in the parameters*/
9832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
9833 request->frame_number)) {
9834 LOGE("Failed to set the frame number in the parameters");
9835 return BAD_VALUE;
9836 }
9837
9838 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009840 LOGE("Failed to set stream type mask in the parameters");
9841 return BAD_VALUE;
9842 }
9843
9844 if (mUpdateDebugLevel) {
9845 uint32_t dummyDebugLevel = 0;
9846 /* The value of dummyDebugLevel is irrelavent. On
9847 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
9848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
9849 dummyDebugLevel)) {
9850 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
9851 return BAD_VALUE;
9852 }
9853 mUpdateDebugLevel = false;
9854 }
9855
9856 if(request->settings != NULL){
9857 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
9858 if (blob_request)
9859 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
9860 }
9861
9862 return rc;
9863}
9864
9865/*===========================================================================
9866 * FUNCTION : setReprocParameters
9867 *
9868 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9869 * return it.
9870 *
9871 * PARAMETERS :
9872 * @request : request that needs to be serviced
9873 *
9874 * RETURN : success: NO_ERROR
9875 * failure:
9876 *==========================================================================*/
9877int32_t QCamera3HardwareInterface::setReprocParameters(
9878 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9879 uint32_t snapshotStreamId)
9880{
9881 /*translate from camera_metadata_t type to parm_type_t*/
9882 int rc = 0;
9883
9884 if (NULL == request->settings){
9885 LOGE("Reprocess settings cannot be NULL");
9886 return BAD_VALUE;
9887 }
9888
9889 if (NULL == reprocParam) {
9890 LOGE("Invalid reprocessing metadata buffer");
9891 return BAD_VALUE;
9892 }
9893 clear_metadata_buffer(reprocParam);
9894
9895 /*we need to update the frame number in the parameters*/
9896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9897 request->frame_number)) {
9898 LOGE("Failed to set the frame number in the parameters");
9899 return BAD_VALUE;
9900 }
9901
9902 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9903 if (rc < 0) {
9904 LOGE("Failed to translate reproc request");
9905 return rc;
9906 }
9907
9908 CameraMetadata frame_settings;
9909 frame_settings = request->settings;
9910 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9911 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9912 int32_t *crop_count =
9913 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9914 int32_t *crop_data =
9915 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9916 int32_t *roi_map =
9917 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9918 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9919 cam_crop_data_t crop_meta;
9920 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9921 crop_meta.num_of_streams = 1;
9922 crop_meta.crop_info[0].crop.left = crop_data[0];
9923 crop_meta.crop_info[0].crop.top = crop_data[1];
9924 crop_meta.crop_info[0].crop.width = crop_data[2];
9925 crop_meta.crop_info[0].crop.height = crop_data[3];
9926
9927 crop_meta.crop_info[0].roi_map.left =
9928 roi_map[0];
9929 crop_meta.crop_info[0].roi_map.top =
9930 roi_map[1];
9931 crop_meta.crop_info[0].roi_map.width =
9932 roi_map[2];
9933 crop_meta.crop_info[0].roi_map.height =
9934 roi_map[3];
9935
9936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9937 rc = BAD_VALUE;
9938 }
9939 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9940 request->input_buffer->stream,
9941 crop_meta.crop_info[0].crop.left,
9942 crop_meta.crop_info[0].crop.top,
9943 crop_meta.crop_info[0].crop.width,
9944 crop_meta.crop_info[0].crop.height);
9945 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9946 request->input_buffer->stream,
9947 crop_meta.crop_info[0].roi_map.left,
9948 crop_meta.crop_info[0].roi_map.top,
9949 crop_meta.crop_info[0].roi_map.width,
9950 crop_meta.crop_info[0].roi_map.height);
9951 } else {
9952 LOGE("Invalid reprocess crop count %d!", *crop_count);
9953 }
9954 } else {
9955 LOGE("No crop data from matching output stream");
9956 }
9957
9958 /* These settings are not needed for regular requests so handle them specially for
9959 reprocess requests; information needed for EXIF tags */
9960 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9961 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9962 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9963 if (NAME_NOT_FOUND != val) {
9964 uint32_t flashMode = (uint32_t)val;
9965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9966 rc = BAD_VALUE;
9967 }
9968 } else {
9969 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9970 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9971 }
9972 } else {
9973 LOGH("No flash mode in reprocess settings");
9974 }
9975
9976 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9977 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9978 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9979 rc = BAD_VALUE;
9980 }
9981 } else {
9982 LOGH("No flash state in reprocess settings");
9983 }
9984
9985 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
9986 uint8_t *reprocessFlags =
9987 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
9988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
9989 *reprocessFlags)) {
9990 rc = BAD_VALUE;
9991 }
9992 }
9993
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009994 // Add metadata which reprocess needs
9995 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
9996 cam_reprocess_info_t *repro_info =
9997 (cam_reprocess_info_t *)frame_settings.find
9998 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -07009999 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010000 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010001 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010002 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010003 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010004 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010005 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010006 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010007 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010008 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010009 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010010 repro_info->pipeline_flip);
10011 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10012 repro_info->af_roi);
10013 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10014 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010015 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10016 CAM_INTF_PARM_ROTATION metadata then has been added in
10017 translateToHalMetadata. HAL need to keep this new rotation
10018 metadata. Otherwise, the old rotation info saved in the vendor tag
10019 would be used */
10020 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10021 CAM_INTF_PARM_ROTATION, reprocParam) {
10022 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10023 } else {
10024 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010025 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010026 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010027 }
10028
10029 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10030 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10031 roi.width and roi.height would be the final JPEG size.
10032 For now, HAL only checks this for reprocess request */
10033 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10034 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10035 uint8_t *enable =
10036 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10037 if (*enable == TRUE) {
10038 int32_t *crop_data =
10039 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10040 cam_stream_crop_info_t crop_meta;
10041 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10042 crop_meta.stream_id = 0;
10043 crop_meta.crop.left = crop_data[0];
10044 crop_meta.crop.top = crop_data[1];
10045 crop_meta.crop.width = crop_data[2];
10046 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010047 // The JPEG crop roi should match cpp output size
10048 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10049 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10050 crop_meta.roi_map.left = 0;
10051 crop_meta.roi_map.top = 0;
10052 crop_meta.roi_map.width = cpp_crop->crop.width;
10053 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010054 }
10055 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10056 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010057 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010058 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010059 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10060 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010061 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010062 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10063
10064 // Add JPEG scale information
10065 cam_dimension_t scale_dim;
10066 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10067 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10068 int32_t *roi =
10069 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10070 scale_dim.width = roi[2];
10071 scale_dim.height = roi[3];
10072 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10073 scale_dim);
10074 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10075 scale_dim.width, scale_dim.height, mCameraId);
10076 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010077 }
10078 }
10079
10080 return rc;
10081}
10082
10083/*===========================================================================
10084 * FUNCTION : saveRequestSettings
10085 *
10086 * DESCRIPTION: Add any settings that might have changed to the request settings
10087 * and save the settings to be applied on the frame
10088 *
10089 * PARAMETERS :
10090 * @jpegMetadata : the extracted and/or modified jpeg metadata
10091 * @request : request with initial settings
10092 *
10093 * RETURN :
10094 * camera_metadata_t* : pointer to the saved request settings
10095 *==========================================================================*/
10096camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10097 const CameraMetadata &jpegMetadata,
10098 camera3_capture_request_t *request)
10099{
10100 camera_metadata_t *resultMetadata;
10101 CameraMetadata camMetadata;
10102 camMetadata = request->settings;
10103
10104 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10105 int32_t thumbnail_size[2];
10106 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10107 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10108 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10109 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10110 }
10111
10112 if (request->input_buffer != NULL) {
10113 uint8_t reprocessFlags = 1;
10114 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10115 (uint8_t*)&reprocessFlags,
10116 sizeof(reprocessFlags));
10117 }
10118
10119 resultMetadata = camMetadata.release();
10120 return resultMetadata;
10121}
10122
10123/*===========================================================================
10124 * FUNCTION : setHalFpsRange
10125 *
10126 * DESCRIPTION: set FPS range parameter
10127 *
10128 *
10129 * PARAMETERS :
10130 * @settings : Metadata from framework
10131 * @hal_metadata: Metadata buffer
10132 *
10133 *
10134 * RETURN : success: NO_ERROR
10135 * failure:
10136 *==========================================================================*/
10137int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10138 metadata_buffer_t *hal_metadata)
10139{
10140 int32_t rc = NO_ERROR;
10141 cam_fps_range_t fps_range;
10142 fps_range.min_fps = (float)
10143 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10144 fps_range.max_fps = (float)
10145 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10146 fps_range.video_min_fps = fps_range.min_fps;
10147 fps_range.video_max_fps = fps_range.max_fps;
10148
10149 LOGD("aeTargetFpsRange fps: [%f %f]",
10150 fps_range.min_fps, fps_range.max_fps);
10151 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10152 * follows:
10153 * ---------------------------------------------------------------|
10154 * Video stream is absent in configure_streams |
10155 * (Camcorder preview before the first video record |
10156 * ---------------------------------------------------------------|
10157 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10158 * | | | vid_min/max_fps|
10159 * ---------------------------------------------------------------|
10160 * NO | [ 30, 240] | 240 | [240, 240] |
10161 * |-------------|-------------|----------------|
10162 * | [240, 240] | 240 | [240, 240] |
10163 * ---------------------------------------------------------------|
10164 * Video stream is present in configure_streams |
10165 * ---------------------------------------------------------------|
10166 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10167 * | | | vid_min/max_fps|
10168 * ---------------------------------------------------------------|
10169 * NO | [ 30, 240] | 240 | [240, 240] |
10170 * (camcorder prev |-------------|-------------|----------------|
10171 * after video rec | [240, 240] | 240 | [240, 240] |
10172 * is stopped) | | | |
10173 * ---------------------------------------------------------------|
10174 * YES | [ 30, 240] | 240 | [240, 240] |
10175 * |-------------|-------------|----------------|
10176 * | [240, 240] | 240 | [240, 240] |
10177 * ---------------------------------------------------------------|
10178 * When Video stream is absent in configure_streams,
10179 * preview fps = sensor_fps / batchsize
10180 * Eg: for 240fps at batchSize 4, preview = 60fps
10181 * for 120fps at batchSize 4, preview = 30fps
10182 *
10183 * When video stream is present in configure_streams, preview fps is as per
10184 * the ratio of preview buffers to video buffers requested in process
10185 * capture request
10186 */
10187 mBatchSize = 0;
10188 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10189 fps_range.min_fps = fps_range.video_max_fps;
10190 fps_range.video_min_fps = fps_range.video_max_fps;
10191 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10192 fps_range.max_fps);
10193 if (NAME_NOT_FOUND != val) {
10194 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10195 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10196 return BAD_VALUE;
10197 }
10198
10199 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10200 /* If batchmode is currently in progress and the fps changes,
10201 * set the flag to restart the sensor */
10202 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10203 (mHFRVideoFps != fps_range.max_fps)) {
10204 mNeedSensorRestart = true;
10205 }
10206 mHFRVideoFps = fps_range.max_fps;
10207 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10208 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10209 mBatchSize = MAX_HFR_BATCH_SIZE;
10210 }
10211 }
10212 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10213
10214 }
10215 } else {
10216 /* HFR mode is session param in backend/ISP. This should be reset when
10217 * in non-HFR mode */
10218 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10220 return BAD_VALUE;
10221 }
10222 }
10223 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10224 return BAD_VALUE;
10225 }
10226 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10227 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10228 return rc;
10229}
10230
10231/*===========================================================================
10232 * FUNCTION : translateToHalMetadata
10233 *
10234 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10235 *
10236 *
10237 * PARAMETERS :
10238 * @request : request sent from framework
10239 *
10240 *
10241 * RETURN : success: NO_ERROR
10242 * failure:
10243 *==========================================================================*/
10244int QCamera3HardwareInterface::translateToHalMetadata
10245 (const camera3_capture_request_t *request,
10246 metadata_buffer_t *hal_metadata,
10247 uint32_t snapshotStreamId)
10248{
10249 int rc = 0;
10250 CameraMetadata frame_settings;
10251 frame_settings = request->settings;
10252
10253 /* Do not change the order of the following list unless you know what you are
10254 * doing.
10255 * The order is laid out in such a way that parameters in the front of the table
10256 * may be used to override the parameters later in the table. Examples are:
10257 * 1. META_MODE should precede AEC/AWB/AF MODE
10258 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10259 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10260 * 4. Any mode should precede it's corresponding settings
10261 */
10262 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10263 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10265 rc = BAD_VALUE;
10266 }
10267 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10268 if (rc != NO_ERROR) {
10269 LOGE("extractSceneMode failed");
10270 }
10271 }
10272
10273 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10274 uint8_t fwk_aeMode =
10275 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10276 uint8_t aeMode;
10277 int32_t redeye;
10278
10279 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10280 aeMode = CAM_AE_MODE_OFF;
10281 } else {
10282 aeMode = CAM_AE_MODE_ON;
10283 }
10284 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10285 redeye = 1;
10286 } else {
10287 redeye = 0;
10288 }
10289
10290 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10291 fwk_aeMode);
10292 if (NAME_NOT_FOUND != val) {
10293 int32_t flashMode = (int32_t)val;
10294 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10295 }
10296
10297 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10298 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10299 rc = BAD_VALUE;
10300 }
10301 }
10302
10303 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10304 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10305 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10306 fwk_whiteLevel);
10307 if (NAME_NOT_FOUND != val) {
10308 uint8_t whiteLevel = (uint8_t)val;
10309 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10310 rc = BAD_VALUE;
10311 }
10312 }
10313 }
10314
10315 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10316 uint8_t fwk_cacMode =
10317 frame_settings.find(
10318 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10319 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10320 fwk_cacMode);
10321 if (NAME_NOT_FOUND != val) {
10322 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10323 bool entryAvailable = FALSE;
10324 // Check whether Frameworks set CAC mode is supported in device or not
10325 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10326 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10327 entryAvailable = TRUE;
10328 break;
10329 }
10330 }
10331 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10332 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10333 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10334 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10335 if (entryAvailable == FALSE) {
10336 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10337 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10338 } else {
10339 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10340 // High is not supported and so set the FAST as spec say's underlying
10341 // device implementation can be the same for both modes.
10342 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10343 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10344 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10345 // in order to avoid the fps drop due to high quality
10346 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10347 } else {
10348 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10349 }
10350 }
10351 }
10352 LOGD("Final cacMode is %d", cacMode);
10353 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10354 rc = BAD_VALUE;
10355 }
10356 } else {
10357 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10358 }
10359 }
10360
10361 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10362 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10363 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10364 fwk_focusMode);
10365 if (NAME_NOT_FOUND != val) {
10366 uint8_t focusMode = (uint8_t)val;
10367 LOGD("set focus mode %d", focusMode);
10368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10369 rc = BAD_VALUE;
10370 }
10371 }
10372 }
10373
10374 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10375 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10377 focalDistance)) {
10378 rc = BAD_VALUE;
10379 }
10380 }
10381
10382 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10383 uint8_t fwk_antibandingMode =
10384 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10385 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10386 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10387 if (NAME_NOT_FOUND != val) {
10388 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010389 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10390 if (m60HzZone) {
10391 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10392 } else {
10393 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10394 }
10395 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10397 hal_antibandingMode)) {
10398 rc = BAD_VALUE;
10399 }
10400 }
10401 }
10402
10403 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10404 int32_t expCompensation = frame_settings.find(
10405 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10406 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10407 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10408 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10409 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010410 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10412 expCompensation)) {
10413 rc = BAD_VALUE;
10414 }
10415 }
10416
10417 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10418 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10419 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10420 rc = BAD_VALUE;
10421 }
10422 }
10423 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10424 rc = setHalFpsRange(frame_settings, hal_metadata);
10425 if (rc != NO_ERROR) {
10426 LOGE("setHalFpsRange failed");
10427 }
10428 }
10429
10430 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10431 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10433 rc = BAD_VALUE;
10434 }
10435 }
10436
10437 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10438 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10439 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10440 fwk_effectMode);
10441 if (NAME_NOT_FOUND != val) {
10442 uint8_t effectMode = (uint8_t)val;
10443 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10444 rc = BAD_VALUE;
10445 }
10446 }
10447 }
10448
10449 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10450 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10451 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10452 colorCorrectMode)) {
10453 rc = BAD_VALUE;
10454 }
10455 }
10456
10457 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10458 cam_color_correct_gains_t colorCorrectGains;
10459 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10460 colorCorrectGains.gains[i] =
10461 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10462 }
10463 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10464 colorCorrectGains)) {
10465 rc = BAD_VALUE;
10466 }
10467 }
10468
10469 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10470 cam_color_correct_matrix_t colorCorrectTransform;
10471 cam_rational_type_t transform_elem;
10472 size_t num = 0;
10473 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10474 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10475 transform_elem.numerator =
10476 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10477 transform_elem.denominator =
10478 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10479 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10480 num++;
10481 }
10482 }
10483 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10484 colorCorrectTransform)) {
10485 rc = BAD_VALUE;
10486 }
10487 }
10488
10489 cam_trigger_t aecTrigger;
10490 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10491 aecTrigger.trigger_id = -1;
10492 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10493 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10494 aecTrigger.trigger =
10495 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10496 aecTrigger.trigger_id =
10497 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10499 aecTrigger)) {
10500 rc = BAD_VALUE;
10501 }
10502 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10503 aecTrigger.trigger, aecTrigger.trigger_id);
10504 }
10505
10506 /*af_trigger must come with a trigger id*/
10507 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10508 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10509 cam_trigger_t af_trigger;
10510 af_trigger.trigger =
10511 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10512 af_trigger.trigger_id =
10513 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10514 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10515 rc = BAD_VALUE;
10516 }
10517 LOGD("AfTrigger: %d AfTriggerID: %d",
10518 af_trigger.trigger, af_trigger.trigger_id);
10519 }
10520
10521 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10522 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10523 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10524 rc = BAD_VALUE;
10525 }
10526 }
10527 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10528 cam_edge_application_t edge_application;
10529 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10530 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10531 edge_application.sharpness = 0;
10532 } else {
10533 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10534 }
10535 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10536 rc = BAD_VALUE;
10537 }
10538 }
10539
10540 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10541 int32_t respectFlashMode = 1;
10542 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10543 uint8_t fwk_aeMode =
10544 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10545 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10546 respectFlashMode = 0;
10547 LOGH("AE Mode controls flash, ignore android.flash.mode");
10548 }
10549 }
10550 if (respectFlashMode) {
10551 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10552 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10553 LOGH("flash mode after mapping %d", val);
10554 // To check: CAM_INTF_META_FLASH_MODE usage
10555 if (NAME_NOT_FOUND != val) {
10556 uint8_t flashMode = (uint8_t)val;
10557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10558 rc = BAD_VALUE;
10559 }
10560 }
10561 }
10562 }
10563
10564 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10565 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10566 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10567 rc = BAD_VALUE;
10568 }
10569 }
10570
10571 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10572 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10574 flashFiringTime)) {
10575 rc = BAD_VALUE;
10576 }
10577 }
10578
10579 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10580 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10582 hotPixelMode)) {
10583 rc = BAD_VALUE;
10584 }
10585 }
10586
10587 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10588 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10589 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10590 lensAperture)) {
10591 rc = BAD_VALUE;
10592 }
10593 }
10594
10595 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10596 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10597 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10598 filterDensity)) {
10599 rc = BAD_VALUE;
10600 }
10601 }
10602
10603 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10604 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10605 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10606 focalLength)) {
10607 rc = BAD_VALUE;
10608 }
10609 }
10610
10611 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10612 uint8_t optStabMode =
10613 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10615 optStabMode)) {
10616 rc = BAD_VALUE;
10617 }
10618 }
10619
10620 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10621 uint8_t videoStabMode =
10622 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10623 LOGD("videoStabMode from APP = %d", videoStabMode);
10624 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10625 videoStabMode)) {
10626 rc = BAD_VALUE;
10627 }
10628 }
10629
10630
10631 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10632 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10634 noiseRedMode)) {
10635 rc = BAD_VALUE;
10636 }
10637 }
10638
10639 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10640 float reprocessEffectiveExposureFactor =
10641 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10643 reprocessEffectiveExposureFactor)) {
10644 rc = BAD_VALUE;
10645 }
10646 }
10647
10648 cam_crop_region_t scalerCropRegion;
10649 bool scalerCropSet = false;
10650 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10651 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10652 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10653 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10654 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10655
10656 // Map coordinate system from active array to sensor output.
10657 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10658 scalerCropRegion.width, scalerCropRegion.height);
10659
10660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10661 scalerCropRegion)) {
10662 rc = BAD_VALUE;
10663 }
10664 scalerCropSet = true;
10665 }
10666
10667 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10668 int64_t sensorExpTime =
10669 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10670 LOGD("setting sensorExpTime %lld", sensorExpTime);
10671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10672 sensorExpTime)) {
10673 rc = BAD_VALUE;
10674 }
10675 }
10676
10677 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10678 int64_t sensorFrameDuration =
10679 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10680 int64_t minFrameDuration = getMinFrameDuration(request);
10681 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10682 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10683 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10684 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10685 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10686 sensorFrameDuration)) {
10687 rc = BAD_VALUE;
10688 }
10689 }
10690
10691 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10692 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10693 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10694 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10695 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10696 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10697 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10699 sensorSensitivity)) {
10700 rc = BAD_VALUE;
10701 }
10702 }
10703
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010704#ifndef USE_HAL_3_3
10705 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10706 int32_t ispSensitivity =
10707 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10708 if (ispSensitivity <
10709 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10710 ispSensitivity =
10711 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10712 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10713 }
10714 if (ispSensitivity >
10715 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10716 ispSensitivity =
10717 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10718 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10719 }
10720 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10721 ispSensitivity)) {
10722 rc = BAD_VALUE;
10723 }
10724 }
10725#endif
10726
Thierry Strudel3d639192016-09-09 11:52:26 -070010727 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10728 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10730 rc = BAD_VALUE;
10731 }
10732 }
10733
10734 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10735 uint8_t fwk_facedetectMode =
10736 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10737
10738 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10739 fwk_facedetectMode);
10740
10741 if (NAME_NOT_FOUND != val) {
10742 uint8_t facedetectMode = (uint8_t)val;
10743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10744 facedetectMode)) {
10745 rc = BAD_VALUE;
10746 }
10747 }
10748 }
10749
10750 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10751 uint8_t histogramMode =
10752 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10753 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10754 histogramMode)) {
10755 rc = BAD_VALUE;
10756 }
10757 }
10758
10759 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10760 uint8_t sharpnessMapMode =
10761 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10763 sharpnessMapMode)) {
10764 rc = BAD_VALUE;
10765 }
10766 }
10767
10768 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10769 uint8_t tonemapMode =
10770 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10772 rc = BAD_VALUE;
10773 }
10774 }
10775 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10776 /*All tonemap channels will have the same number of points*/
10777 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10778 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10779 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10780 cam_rgb_tonemap_curves tonemapCurves;
10781 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10782 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10783 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10784 tonemapCurves.tonemap_points_cnt,
10785 CAM_MAX_TONEMAP_CURVE_SIZE);
10786 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10787 }
10788
10789 /* ch0 = G*/
10790 size_t point = 0;
10791 cam_tonemap_curve_t tonemapCurveGreen;
10792 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10793 for (size_t j = 0; j < 2; j++) {
10794 tonemapCurveGreen.tonemap_points[i][j] =
10795 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10796 point++;
10797 }
10798 }
10799 tonemapCurves.curves[0] = tonemapCurveGreen;
10800
10801 /* ch 1 = B */
10802 point = 0;
10803 cam_tonemap_curve_t tonemapCurveBlue;
10804 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10805 for (size_t j = 0; j < 2; j++) {
10806 tonemapCurveBlue.tonemap_points[i][j] =
10807 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10808 point++;
10809 }
10810 }
10811 tonemapCurves.curves[1] = tonemapCurveBlue;
10812
10813 /* ch 2 = R */
10814 point = 0;
10815 cam_tonemap_curve_t tonemapCurveRed;
10816 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10817 for (size_t j = 0; j < 2; j++) {
10818 tonemapCurveRed.tonemap_points[i][j] =
10819 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10820 point++;
10821 }
10822 }
10823 tonemapCurves.curves[2] = tonemapCurveRed;
10824
10825 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10826 tonemapCurves)) {
10827 rc = BAD_VALUE;
10828 }
10829 }
10830
10831 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10832 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10834 captureIntent)) {
10835 rc = BAD_VALUE;
10836 }
10837 }
10838
10839 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10840 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10842 blackLevelLock)) {
10843 rc = BAD_VALUE;
10844 }
10845 }
10846
10847 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10848 uint8_t lensShadingMapMode =
10849 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10850 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10851 lensShadingMapMode)) {
10852 rc = BAD_VALUE;
10853 }
10854 }
10855
10856 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10857 cam_area_t roi;
10858 bool reset = true;
10859 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10860
10861 // Map coordinate system from active array to sensor output.
10862 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10863 roi.rect.height);
10864
10865 if (scalerCropSet) {
10866 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10867 }
10868 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10869 rc = BAD_VALUE;
10870 }
10871 }
10872
10873 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10874 cam_area_t roi;
10875 bool reset = true;
10876 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10877
10878 // Map coordinate system from active array to sensor output.
10879 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10880 roi.rect.height);
10881
10882 if (scalerCropSet) {
10883 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10884 }
10885 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10886 rc = BAD_VALUE;
10887 }
10888 }
10889
10890 // CDS for non-HFR non-video mode
10891 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10892 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10893 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10894 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10895 LOGE("Invalid CDS mode %d!", *fwk_cds);
10896 } else {
10897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10898 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10899 rc = BAD_VALUE;
10900 }
10901 }
10902 }
10903
Thierry Strudel04e026f2016-10-10 11:27:36 -070010904 // Video HDR
10905 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
10906 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
10907 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
10908 rc = setVideoHdrMode(mParameters, vhdr);
10909 if (rc != NO_ERROR) {
10910 LOGE("setVideoHDR is failed");
10911 }
10912 }
10913
10914 //IR
10915 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
10916 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
10917 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
10918 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
10919 LOGE("Invalid IR mode %d!", fwk_ir);
10920 } else {
10921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10922 CAM_INTF_META_IR_MODE, fwk_ir)) {
10923 rc = BAD_VALUE;
10924 }
10925 }
10926 }
10927
Thierry Strudel269c81a2016-10-12 12:13:59 -070010928 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
10929 float aec_speed;
10930 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
10931 LOGD("AEC Speed :%f", aec_speed);
10932 if ( aec_speed < 0 ) {
10933 LOGE("Invalid AEC mode %f!", aec_speed);
10934 } else {
10935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
10936 aec_speed)) {
10937 rc = BAD_VALUE;
10938 }
10939 }
10940 }
10941
10942 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
10943 float awb_speed;
10944 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
10945 LOGD("AWB Speed :%f", awb_speed);
10946 if ( awb_speed < 0 ) {
10947 LOGE("Invalid AWB mode %f!", awb_speed);
10948 } else {
10949 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
10950 awb_speed)) {
10951 rc = BAD_VALUE;
10952 }
10953 }
10954 }
10955
Thierry Strudel3d639192016-09-09 11:52:26 -070010956 // TNR
10957 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10958 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10959 uint8_t b_TnrRequested = 0;
10960 cam_denoise_param_t tnr;
10961 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10962 tnr.process_plates =
10963 (cam_denoise_process_type_t)frame_settings.find(
10964 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10965 b_TnrRequested = tnr.denoise_enable;
10966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10967 rc = BAD_VALUE;
10968 }
10969 }
10970
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010971 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10972 int32_t* exposure_metering_mode =
10973 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10975 *exposure_metering_mode)) {
10976 rc = BAD_VALUE;
10977 }
10978 }
10979
Thierry Strudel3d639192016-09-09 11:52:26 -070010980 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10981 int32_t fwk_testPatternMode =
10982 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10983 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10984 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10985
10986 if (NAME_NOT_FOUND != testPatternMode) {
10987 cam_test_pattern_data_t testPatternData;
10988 memset(&testPatternData, 0, sizeof(testPatternData));
10989 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10990 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10991 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10992 int32_t *fwk_testPatternData =
10993 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10994 testPatternData.r = fwk_testPatternData[0];
10995 testPatternData.b = fwk_testPatternData[3];
10996 switch (gCamCapability[mCameraId]->color_arrangement) {
10997 case CAM_FILTER_ARRANGEMENT_RGGB:
10998 case CAM_FILTER_ARRANGEMENT_GRBG:
10999 testPatternData.gr = fwk_testPatternData[1];
11000 testPatternData.gb = fwk_testPatternData[2];
11001 break;
11002 case CAM_FILTER_ARRANGEMENT_GBRG:
11003 case CAM_FILTER_ARRANGEMENT_BGGR:
11004 testPatternData.gr = fwk_testPatternData[2];
11005 testPatternData.gb = fwk_testPatternData[1];
11006 break;
11007 default:
11008 LOGE("color arrangement %d is not supported",
11009 gCamCapability[mCameraId]->color_arrangement);
11010 break;
11011 }
11012 }
11013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11014 testPatternData)) {
11015 rc = BAD_VALUE;
11016 }
11017 } else {
11018 LOGE("Invalid framework sensor test pattern mode %d",
11019 fwk_testPatternMode);
11020 }
11021 }
11022
11023 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11024 size_t count = 0;
11025 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11026 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11027 gps_coords.data.d, gps_coords.count, count);
11028 if (gps_coords.count != count) {
11029 rc = BAD_VALUE;
11030 }
11031 }
11032
11033 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11034 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11035 size_t count = 0;
11036 const char *gps_methods_src = (const char *)
11037 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11038 memset(gps_methods, '\0', sizeof(gps_methods));
11039 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11040 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11041 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11042 if (GPS_PROCESSING_METHOD_SIZE != count) {
11043 rc = BAD_VALUE;
11044 }
11045 }
11046
11047 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11048 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11049 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11050 gps_timestamp)) {
11051 rc = BAD_VALUE;
11052 }
11053 }
11054
11055 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11056 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11057 cam_rotation_info_t rotation_info;
11058 if (orientation == 0) {
11059 rotation_info.rotation = ROTATE_0;
11060 } else if (orientation == 90) {
11061 rotation_info.rotation = ROTATE_90;
11062 } else if (orientation == 180) {
11063 rotation_info.rotation = ROTATE_180;
11064 } else if (orientation == 270) {
11065 rotation_info.rotation = ROTATE_270;
11066 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011067 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011068 rotation_info.streamId = snapshotStreamId;
11069 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11071 rc = BAD_VALUE;
11072 }
11073 }
11074
11075 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11076 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11078 rc = BAD_VALUE;
11079 }
11080 }
11081
11082 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11083 uint32_t thumb_quality = (uint32_t)
11084 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11085 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11086 thumb_quality)) {
11087 rc = BAD_VALUE;
11088 }
11089 }
11090
11091 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11092 cam_dimension_t dim;
11093 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11094 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11096 rc = BAD_VALUE;
11097 }
11098 }
11099
11100 // Internal metadata
11101 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11102 size_t count = 0;
11103 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11104 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11105 privatedata.data.i32, privatedata.count, count);
11106 if (privatedata.count != count) {
11107 rc = BAD_VALUE;
11108 }
11109 }
11110
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011111 // ISO/Exposure Priority
11112 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11113 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11114 cam_priority_mode_t mode =
11115 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11116 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11117 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11118 use_iso_exp_pty.previewOnly = FALSE;
11119 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11120 use_iso_exp_pty.value = *ptr;
11121
11122 if(CAM_ISO_PRIORITY == mode) {
11123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11124 use_iso_exp_pty)) {
11125 rc = BAD_VALUE;
11126 }
11127 }
11128 else {
11129 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11130 use_iso_exp_pty)) {
11131 rc = BAD_VALUE;
11132 }
11133 }
11134 }
11135 }
11136
11137 // Saturation
11138 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11139 int32_t* use_saturation =
11140 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11141 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11142 rc = BAD_VALUE;
11143 }
11144 }
11145
Thierry Strudel3d639192016-09-09 11:52:26 -070011146 // EV step
11147 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11148 gCamCapability[mCameraId]->exp_compensation_step)) {
11149 rc = BAD_VALUE;
11150 }
11151
11152 // CDS info
11153 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11154 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11155 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11156
11157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11158 CAM_INTF_META_CDS_DATA, *cdsData)) {
11159 rc = BAD_VALUE;
11160 }
11161 }
11162
Shuzhen Wang19463d72016-03-08 11:09:52 -080011163 // Hybrid AE
11164 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11165 uint8_t *hybrid_ae = (uint8_t *)
11166 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11167
11168 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11169 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11170 rc = BAD_VALUE;
11171 }
11172 }
11173
Thierry Strudel3d639192016-09-09 11:52:26 -070011174 return rc;
11175}
11176
11177/*===========================================================================
11178 * FUNCTION : captureResultCb
11179 *
11180 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11181 *
11182 * PARAMETERS :
11183 * @frame : frame information from mm-camera-interface
11184 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11185 * @userdata: userdata
11186 *
11187 * RETURN : NONE
11188 *==========================================================================*/
11189void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11190 camera3_stream_buffer_t *buffer,
11191 uint32_t frame_number, bool isInputBuffer, void *userdata)
11192{
11193 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11194 if (hw == NULL) {
11195 LOGE("Invalid hw %p", hw);
11196 return;
11197 }
11198
11199 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11200 return;
11201}
11202
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011203/*===========================================================================
11204 * FUNCTION : setBufferErrorStatus
11205 *
11206 * DESCRIPTION: Callback handler for channels to report any buffer errors
11207 *
11208 * PARAMETERS :
11209 * @ch : Channel on which buffer error is reported from
11210 * @frame_number : frame number on which buffer error is reported on
11211 * @buffer_status : buffer error status
11212 * @userdata: userdata
11213 *
11214 * RETURN : NONE
11215 *==========================================================================*/
11216void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11217 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11218{
11219 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11220 if (hw == NULL) {
11221 LOGE("Invalid hw %p", hw);
11222 return;
11223 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011224
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011225 hw->setBufferErrorStatus(ch, frame_number, err);
11226 return;
11227}
11228
11229void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11230 uint32_t frameNumber, camera3_buffer_status_t err)
11231{
11232 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11233 pthread_mutex_lock(&mMutex);
11234
11235 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11236 if (req.frame_number != frameNumber)
11237 continue;
11238 for (auto& k : req.mPendingBufferList) {
11239 if(k.stream->priv == ch) {
11240 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11241 }
11242 }
11243 }
11244
11245 pthread_mutex_unlock(&mMutex);
11246 return;
11247}
Thierry Strudel3d639192016-09-09 11:52:26 -070011248/*===========================================================================
11249 * FUNCTION : initialize
11250 *
11251 * DESCRIPTION: Pass framework callback pointers to HAL
11252 *
11253 * PARAMETERS :
11254 *
11255 *
11256 * RETURN : Success : 0
11257 * Failure: -ENODEV
11258 *==========================================================================*/
11259
11260int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11261 const camera3_callback_ops_t *callback_ops)
11262{
11263 LOGD("E");
11264 QCamera3HardwareInterface *hw =
11265 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11266 if (!hw) {
11267 LOGE("NULL camera device");
11268 return -ENODEV;
11269 }
11270
11271 int rc = hw->initialize(callback_ops);
11272 LOGD("X");
11273 return rc;
11274}
11275
11276/*===========================================================================
11277 * FUNCTION : configure_streams
11278 *
11279 * DESCRIPTION:
11280 *
11281 * PARAMETERS :
11282 *
11283 *
11284 * RETURN : Success: 0
11285 * Failure: -EINVAL (if stream configuration is invalid)
11286 * -ENODEV (fatal error)
11287 *==========================================================================*/
11288
11289int QCamera3HardwareInterface::configure_streams(
11290 const struct camera3_device *device,
11291 camera3_stream_configuration_t *stream_list)
11292{
11293 LOGD("E");
11294 QCamera3HardwareInterface *hw =
11295 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11296 if (!hw) {
11297 LOGE("NULL camera device");
11298 return -ENODEV;
11299 }
11300 int rc = hw->configureStreams(stream_list);
11301 LOGD("X");
11302 return rc;
11303}
11304
11305/*===========================================================================
11306 * FUNCTION : construct_default_request_settings
11307 *
11308 * DESCRIPTION: Configure a settings buffer to meet the required use case
11309 *
11310 * PARAMETERS :
11311 *
11312 *
11313 * RETURN : Success: Return valid metadata
11314 * Failure: Return NULL
11315 *==========================================================================*/
11316const camera_metadata_t* QCamera3HardwareInterface::
11317 construct_default_request_settings(const struct camera3_device *device,
11318 int type)
11319{
11320
11321 LOGD("E");
11322 camera_metadata_t* fwk_metadata = NULL;
11323 QCamera3HardwareInterface *hw =
11324 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11325 if (!hw) {
11326 LOGE("NULL camera device");
11327 return NULL;
11328 }
11329
11330 fwk_metadata = hw->translateCapabilityToMetadata(type);
11331
11332 LOGD("X");
11333 return fwk_metadata;
11334}
11335
11336/*===========================================================================
11337 * FUNCTION : process_capture_request
11338 *
11339 * DESCRIPTION:
11340 *
11341 * PARAMETERS :
11342 *
11343 *
11344 * RETURN :
11345 *==========================================================================*/
11346int QCamera3HardwareInterface::process_capture_request(
11347 const struct camera3_device *device,
11348 camera3_capture_request_t *request)
11349{
11350 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011351 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011352 QCamera3HardwareInterface *hw =
11353 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11354 if (!hw) {
11355 LOGE("NULL camera device");
11356 return -EINVAL;
11357 }
11358
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011359 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011360 LOGD("X");
11361 return rc;
11362}
11363
11364/*===========================================================================
11365 * FUNCTION : dump
11366 *
11367 * DESCRIPTION:
11368 *
11369 * PARAMETERS :
11370 *
11371 *
11372 * RETURN :
11373 *==========================================================================*/
11374
11375void QCamera3HardwareInterface::dump(
11376 const struct camera3_device *device, int fd)
11377{
11378 /* Log level property is read when "adb shell dumpsys media.camera" is
11379 called so that the log level can be controlled without restarting
11380 the media server */
11381 getLogLevel();
11382
11383 LOGD("E");
11384 QCamera3HardwareInterface *hw =
11385 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11386 if (!hw) {
11387 LOGE("NULL camera device");
11388 return;
11389 }
11390
11391 hw->dump(fd);
11392 LOGD("X");
11393 return;
11394}
11395
11396/*===========================================================================
11397 * FUNCTION : flush
11398 *
11399 * DESCRIPTION:
11400 *
11401 * PARAMETERS :
11402 *
11403 *
11404 * RETURN :
11405 *==========================================================================*/
11406
11407int QCamera3HardwareInterface::flush(
11408 const struct camera3_device *device)
11409{
11410 int rc;
11411 LOGD("E");
11412 QCamera3HardwareInterface *hw =
11413 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11414 if (!hw) {
11415 LOGE("NULL camera device");
11416 return -EINVAL;
11417 }
11418
11419 pthread_mutex_lock(&hw->mMutex);
11420 // Validate current state
11421 switch (hw->mState) {
11422 case STARTED:
11423 /* valid state */
11424 break;
11425
11426 case ERROR:
11427 pthread_mutex_unlock(&hw->mMutex);
11428 hw->handleCameraDeviceError();
11429 return -ENODEV;
11430
11431 default:
11432 LOGI("Flush returned during state %d", hw->mState);
11433 pthread_mutex_unlock(&hw->mMutex);
11434 return 0;
11435 }
11436 pthread_mutex_unlock(&hw->mMutex);
11437
11438 rc = hw->flush(true /* restart channels */ );
11439 LOGD("X");
11440 return rc;
11441}
11442
11443/*===========================================================================
11444 * FUNCTION : close_camera_device
11445 *
11446 * DESCRIPTION:
11447 *
11448 * PARAMETERS :
11449 *
11450 *
11451 * RETURN :
11452 *==========================================================================*/
11453int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11454{
11455 int ret = NO_ERROR;
11456 QCamera3HardwareInterface *hw =
11457 reinterpret_cast<QCamera3HardwareInterface *>(
11458 reinterpret_cast<camera3_device_t *>(device)->priv);
11459 if (!hw) {
11460 LOGE("NULL camera device");
11461 return BAD_VALUE;
11462 }
11463
11464 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11465 delete hw;
11466 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011467 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011468 return ret;
11469}
11470
11471/*===========================================================================
11472 * FUNCTION : getWaveletDenoiseProcessPlate
11473 *
11474 * DESCRIPTION: query wavelet denoise process plate
11475 *
11476 * PARAMETERS : None
11477 *
11478 * RETURN : WNR prcocess plate value
11479 *==========================================================================*/
11480cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11481{
11482 char prop[PROPERTY_VALUE_MAX];
11483 memset(prop, 0, sizeof(prop));
11484 property_get("persist.denoise.process.plates", prop, "0");
11485 int processPlate = atoi(prop);
11486 switch(processPlate) {
11487 case 0:
11488 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11489 case 1:
11490 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11491 case 2:
11492 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11493 case 3:
11494 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11495 default:
11496 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11497 }
11498}
11499
11500
11501/*===========================================================================
11502 * FUNCTION : getTemporalDenoiseProcessPlate
11503 *
11504 * DESCRIPTION: query temporal denoise process plate
11505 *
11506 * PARAMETERS : None
11507 *
11508 * RETURN : TNR prcocess plate value
11509 *==========================================================================*/
11510cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11511{
11512 char prop[PROPERTY_VALUE_MAX];
11513 memset(prop, 0, sizeof(prop));
11514 property_get("persist.tnr.process.plates", prop, "0");
11515 int processPlate = atoi(prop);
11516 switch(processPlate) {
11517 case 0:
11518 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11519 case 1:
11520 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11521 case 2:
11522 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11523 case 3:
11524 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11525 default:
11526 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11527 }
11528}
11529
11530
11531/*===========================================================================
11532 * FUNCTION : extractSceneMode
11533 *
11534 * DESCRIPTION: Extract scene mode from frameworks set metadata
11535 *
11536 * PARAMETERS :
11537 * @frame_settings: CameraMetadata reference
11538 * @metaMode: ANDROID_CONTORL_MODE
11539 * @hal_metadata: hal metadata structure
11540 *
11541 * RETURN : None
11542 *==========================================================================*/
11543int32_t QCamera3HardwareInterface::extractSceneMode(
11544 const CameraMetadata &frame_settings, uint8_t metaMode,
11545 metadata_buffer_t *hal_metadata)
11546{
11547 int32_t rc = NO_ERROR;
11548
11549 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11550 camera_metadata_ro_entry entry =
11551 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11552 if (0 == entry.count)
11553 return rc;
11554
11555 uint8_t fwk_sceneMode = entry.data.u8[0];
11556
11557 int val = lookupHalName(SCENE_MODES_MAP,
11558 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11559 fwk_sceneMode);
11560 if (NAME_NOT_FOUND != val) {
11561 uint8_t sceneMode = (uint8_t)val;
11562 LOGD("sceneMode: %d", sceneMode);
11563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11564 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11565 rc = BAD_VALUE;
11566 }
11567 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011568
11569 if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
11570 cam_hdr_param_t hdr_params;
11571 hdr_params.hdr_enable = 1;
11572 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11573 hdr_params.hdr_need_1x = false;
11574 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11575 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11576 rc = BAD_VALUE;
11577 }
11578 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011579 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
11580 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
11581 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11582 LOGD("sceneMode: %d", sceneMode);
11583 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11584 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11585 rc = BAD_VALUE;
11586 }
11587 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011588
11589 if (mForceHdrSnapshot) {
11590 cam_hdr_param_t hdr_params;
11591 hdr_params.hdr_enable = 1;
11592 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11593 hdr_params.hdr_need_1x = false;
11594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11595 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11596 rc = BAD_VALUE;
11597 }
11598 }
11599
Thierry Strudel3d639192016-09-09 11:52:26 -070011600 return rc;
11601}
11602
11603/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011604 * FUNCTION : setVideoHdrMode
11605 *
11606 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11607 *
11608 * PARAMETERS :
11609 * @hal_metadata: hal metadata structure
11610 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11611 *
11612 * RETURN : None
11613 *==========================================================================*/
11614int32_t QCamera3HardwareInterface::setVideoHdrMode(
11615 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
11616{
11617 int32_t rc = NO_ERROR;
11618 if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
11619 LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
11620 rc = BAD_VALUE;
11621 } else {
11622 cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
11623 if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
11624 LOGD("Setting HDR mode Off");
11625 vhdr_type = CAM_SENSOR_HDR_OFF;
11626 } else {
11627 char video_hdr_prop[PROPERTY_VALUE_MAX];
11628 memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
11629 property_get("persist.camera.hdr.video", video_hdr_prop, "3");
11630 uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
11631 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11632 CAM_QCOM_FEATURE_SENSOR_HDR) &&
11633 (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
11634 LOGD("Setting HDR mode In Sensor");
11635 vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
11636 }
11637 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11638 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
11639 (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
11640 LOGD("Setting HDR mode Zigzag");
11641 vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
11642 }
11643 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11644 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
11645 (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
11646 LOGD("Setting HDR mode Staggered");
11647 vhdr_type = CAM_SENSOR_HDR_STAGGERED;
11648 }
11649 if(vhdr_type == CAM_SENSOR_HDR_MAX) {
11650 LOGD("HDR mode not supported");
11651 rc = BAD_VALUE;
11652 }
11653 }
11654 if(rc == NO_ERROR) {
11655 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11656 CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
11657 rc = BAD_VALUE;
11658 }
11659 }
11660 }
11661 return rc;
11662}
11663
11664/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011665 * FUNCTION : needRotationReprocess
11666 *
11667 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11668 *
11669 * PARAMETERS : none
11670 *
11671 * RETURN : true: needed
11672 * false: no need
11673 *==========================================================================*/
11674bool QCamera3HardwareInterface::needRotationReprocess()
11675{
11676 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11677 // current rotation is not zero, and pp has the capability to process rotation
11678 LOGH("need do reprocess for rotation");
11679 return true;
11680 }
11681
11682 return false;
11683}
11684
11685/*===========================================================================
11686 * FUNCTION : needReprocess
11687 *
11688 * DESCRIPTION: if reprocess in needed
11689 *
11690 * PARAMETERS : none
11691 *
11692 * RETURN : true: needed
11693 * false: no need
11694 *==========================================================================*/
11695bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11696{
11697 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11698 // TODO: add for ZSL HDR later
11699 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11700 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11701 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11702 return true;
11703 } else {
11704 LOGH("already post processed frame");
11705 return false;
11706 }
11707 }
11708 return needRotationReprocess();
11709}
11710
11711/*===========================================================================
11712 * FUNCTION : needJpegExifRotation
11713 *
11714 * DESCRIPTION: if rotation from jpeg is needed
11715 *
11716 * PARAMETERS : none
11717 *
11718 * RETURN : true: needed
11719 * false: no need
11720 *==========================================================================*/
11721bool QCamera3HardwareInterface::needJpegExifRotation()
11722{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011723 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011724 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11725 LOGD("Need use Jpeg EXIF Rotation");
11726 return true;
11727 }
11728 return false;
11729}
11730
11731/*===========================================================================
11732 * FUNCTION : addOfflineReprocChannel
11733 *
11734 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
11735 * coming from input channel
11736 *
11737 * PARAMETERS :
11738 * @config : reprocess configuration
11739 * @inputChHandle : pointer to the input (source) channel
11740 *
11741 *
11742 * RETURN : Ptr to the newly created channel obj. NULL if failed.
11743 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
    const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    // Create the channel with an empty feature mask; the actual PP feature
    // set is assembled below and applied via addReprocStreamsFromSource().
    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Start from the HAL3 superset of PP features, then adjust per capability.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the PP hardware cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    // Propagate HDR parameters requested by the caller's reprocess config.
    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Forced multi-frame HDR snapshot overrides the per-request HDR setting.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
11803
11804/*===========================================================================
11805 * FUNCTION : getMobicatMask
11806 *
11807 * DESCRIPTION: returns mobicat mask
11808 *
11809 * PARAMETERS : none
11810 *
11811 * RETURN : mobicat mask
11812 *
11813 *==========================================================================*/
11814uint8_t QCamera3HardwareInterface::getMobicatMask()
11815{
11816 return m_MobicatMask;
11817}
11818
11819/*===========================================================================
11820 * FUNCTION : setMobicat
11821 *
11822 * DESCRIPTION: set Mobicat on/off.
11823 *
11824 * PARAMETERS :
11825 * @params : none
11826 *
11827 * RETURN : int32_t type of status
11828 * NO_ERROR -- success
11829 * none-zero failure code
11830 *==========================================================================*/
11831int32_t QCamera3HardwareInterface::setMobicat()
11832{
11833 char value [PROPERTY_VALUE_MAX];
11834 property_get("persist.camera.mobicat", value, "0");
11835 int32_t ret = NO_ERROR;
11836 uint8_t enableMobi = (uint8_t)atoi(value);
11837
11838 if (enableMobi) {
11839 tune_cmd_t tune_cmd;
11840 tune_cmd.type = SET_RELOAD_CHROMATIX;
11841 tune_cmd.module = MODULE_ALL;
11842 tune_cmd.value = TRUE;
11843 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11844 CAM_INTF_PARM_SET_VFE_COMMAND,
11845 tune_cmd);
11846
11847 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11848 CAM_INTF_PARM_SET_PP_COMMAND,
11849 tune_cmd);
11850 }
11851 m_MobicatMask = enableMobi;
11852
11853 return ret;
11854}
11855
11856/*===========================================================================
11857* FUNCTION : getLogLevel
11858*
11859* DESCRIPTION: Reads the log level property into a variable
11860*
11861* PARAMETERS :
11862* None
11863*
11864* RETURN :
11865* None
11866*==========================================================================*/
11867void QCamera3HardwareInterface::getLogLevel()
11868{
11869 char prop[PROPERTY_VALUE_MAX];
11870 uint32_t globalLogLevel = 0;
11871
11872 property_get("persist.camera.hal.debug", prop, "0");
11873 int val = atoi(prop);
11874 if (0 <= val) {
11875 gCamHal3LogLevel = (uint32_t)val;
11876 }
11877
Thierry Strudel9ec39c62016-12-28 11:30:05 -080011878 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070011879 gKpiDebugLevel = atoi(prop);
11880
11881 property_get("persist.camera.global.debug", prop, "0");
11882 val = atoi(prop);
11883 if (0 <= val) {
11884 globalLogLevel = (uint32_t)val;
11885 }
11886
11887 /* Highest log level among hal.logs and global.logs is selected */
11888 if (gCamHal3LogLevel < globalLogLevel)
11889 gCamHal3LogLevel = globalLogLevel;
11890
11891 return;
11892}
11893
11894/*===========================================================================
11895 * FUNCTION : validateStreamRotations
11896 *
11897 * DESCRIPTION: Check if the rotations requested are supported
11898 *
11899 * PARAMETERS :
11900 * @stream_list : streams to be configured
11901 *
11902 * RETURN : NO_ERROR on success
11903 * -EINVAL on failure
11904 *
11905 *==========================================================================*/
11906int QCamera3HardwareInterface::validateStreamRotations(
11907 camera3_stream_configuration_t *streamList)
11908{
11909 int rc = NO_ERROR;
11910
11911 /*
11912 * Loop through all streams requested in configuration
11913 * Check if unsupported rotations have been requested on any of them
11914 */
11915 for (size_t j = 0; j < streamList->num_streams; j++){
11916 camera3_stream_t *newStream = streamList->streams[j];
11917
11918 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11919 bool isImplDef = (newStream->format ==
11920 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11921 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11922 isImplDef);
11923
11924 if (isRotated && (!isImplDef || isZsl)) {
11925 LOGE("Error: Unsupported rotation of %d requested for stream"
11926 "type:%d and stream format:%d",
11927 newStream->rotation, newStream->stream_type,
11928 newStream->format);
11929 rc = -EINVAL;
11930 break;
11931 }
11932 }
11933
11934 return rc;
11935}
11936
11937/*===========================================================================
11938* FUNCTION : getFlashInfo
11939*
11940* DESCRIPTION: Retrieve information about whether the device has a flash.
11941*
11942* PARAMETERS :
11943* @cameraId : Camera id to query
11944* @hasFlash : Boolean indicating whether there is a flash device
11945* associated with given camera
11946* @flashNode : If a flash device exists, this will be its device node.
11947*
11948* RETURN :
11949* None
11950*==========================================================================*/
11951void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11952 bool& hasFlash,
11953 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11954{
11955 cam_capability_t* camCapability = gCamCapability[cameraId];
11956 if (NULL == camCapability) {
11957 hasFlash = false;
11958 flashNode[0] = '\0';
11959 } else {
11960 hasFlash = camCapability->flash_available;
11961 strlcpy(flashNode,
11962 (char*)camCapability->flash_dev_name,
11963 QCAMERA_MAX_FILEPATH_LENGTH);
11964 }
11965}
11966
11967/*===========================================================================
11968* FUNCTION : getEepromVersionInfo
11969*
11970* DESCRIPTION: Retrieve version info of the sensor EEPROM data
11971*
11972* PARAMETERS : None
11973*
11974* RETURN : string describing EEPROM version
11975* "\0" if no such info available
11976*==========================================================================*/
11977const char *QCamera3HardwareInterface::getEepromVersionInfo()
11978{
11979 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
11980}
11981
11982/*===========================================================================
11983* FUNCTION : getLdafCalib
11984*
11985* DESCRIPTION: Retrieve Laser AF calibration data
11986*
11987* PARAMETERS : None
11988*
11989* RETURN : Two uint32_t describing laser AF calibration data
11990* NULL if none is available.
11991*==========================================================================*/
11992const uint32_t *QCamera3HardwareInterface::getLdafCalib()
11993{
11994 if (mLdafCalibExist) {
11995 return &mLdafCalib[0];
11996 } else {
11997 return NULL;
11998 }
11999}
12000
12001/*===========================================================================
12002 * FUNCTION : dynamicUpdateMetaStreamInfo
12003 *
12004 * DESCRIPTION: This function:
12005 * (1) stops all the channels
12006 * (2) returns error on pending requests and buffers
12007 * (3) sends metastream_info in setparams
12008 * (4) starts all channels
12009 * This is useful when sensor has to be restarted to apply any
12010 * settings such as frame rate from a different sensor mode
12011 *
12012 * PARAMETERS : None
12013 *
12014 * RETURN : NO_ERROR on success
12015 * Error codes on failure
12016 *
12017 *==========================================================================*/
12018int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12019{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012020 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012021 int rc = NO_ERROR;
12022
12023 LOGD("E");
12024
12025 rc = stopAllChannels();
12026 if (rc < 0) {
12027 LOGE("stopAllChannels failed");
12028 return rc;
12029 }
12030
12031 rc = notifyErrorForPendingRequests();
12032 if (rc < 0) {
12033 LOGE("notifyErrorForPendingRequests failed");
12034 return rc;
12035 }
12036
12037 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12038 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12039 "Format:%d",
12040 mStreamConfigInfo.type[i],
12041 mStreamConfigInfo.stream_sizes[i].width,
12042 mStreamConfigInfo.stream_sizes[i].height,
12043 mStreamConfigInfo.postprocess_mask[i],
12044 mStreamConfigInfo.format[i]);
12045 }
12046
12047 /* Send meta stream info once again so that ISP can start */
12048 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12049 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12050 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12051 mParameters);
12052 if (rc < 0) {
12053 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12054 }
12055
12056 rc = startAllChannels();
12057 if (rc < 0) {
12058 LOGE("startAllChannels failed");
12059 return rc;
12060 }
12061
12062 LOGD("X");
12063 return rc;
12064}
12065
12066/*===========================================================================
12067 * FUNCTION : stopAllChannels
12068 *
12069 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12070 *
12071 * PARAMETERS : None
12072 *
12073 * RETURN : NO_ERROR on success
12074 * Error codes on failure
12075 *
12076 *==========================================================================*/
12077int32_t QCamera3HardwareInterface::stopAllChannels()
12078{
12079 int32_t rc = NO_ERROR;
12080
12081 LOGD("Stopping all channels");
12082 // Stop the Streams/Channels
12083 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12084 it != mStreamInfo.end(); it++) {
12085 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12086 if (channel) {
12087 channel->stop();
12088 }
12089 (*it)->status = INVALID;
12090 }
12091
12092 if (mSupportChannel) {
12093 mSupportChannel->stop();
12094 }
12095 if (mAnalysisChannel) {
12096 mAnalysisChannel->stop();
12097 }
12098 if (mRawDumpChannel) {
12099 mRawDumpChannel->stop();
12100 }
12101 if (mMetadataChannel) {
12102 /* If content of mStreamInfo is not 0, there is metadata stream */
12103 mMetadataChannel->stop();
12104 }
12105
12106 LOGD("All channels stopped");
12107 return rc;
12108}
12109
12110/*===========================================================================
12111 * FUNCTION : startAllChannels
12112 *
12113 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12114 *
12115 * PARAMETERS : None
12116 *
12117 * RETURN : NO_ERROR on success
12118 * Error codes on failure
12119 *
12120 *==========================================================================*/
12121int32_t QCamera3HardwareInterface::startAllChannels()
12122{
12123 int32_t rc = NO_ERROR;
12124
12125 LOGD("Start all channels ");
12126 // Start the Streams/Channels
12127 if (mMetadataChannel) {
12128 /* If content of mStreamInfo is not 0, there is metadata stream */
12129 rc = mMetadataChannel->start();
12130 if (rc < 0) {
12131 LOGE("META channel start failed");
12132 return rc;
12133 }
12134 }
12135 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12136 it != mStreamInfo.end(); it++) {
12137 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12138 if (channel) {
12139 rc = channel->start();
12140 if (rc < 0) {
12141 LOGE("channel start failed");
12142 return rc;
12143 }
12144 }
12145 }
12146 if (mAnalysisChannel) {
12147 mAnalysisChannel->start();
12148 }
12149 if (mSupportChannel) {
12150 rc = mSupportChannel->start();
12151 if (rc < 0) {
12152 LOGE("Support channel start failed");
12153 return rc;
12154 }
12155 }
12156 if (mRawDumpChannel) {
12157 rc = mRawDumpChannel->start();
12158 if (rc < 0) {
12159 LOGE("RAW dump channel start failed");
12160 return rc;
12161 }
12162 }
12163
12164 LOGD("All channels started");
12165 return rc;
12166}
12167
12168/*===========================================================================
12169 * FUNCTION : notifyErrorForPendingRequests
12170 *
12171 * DESCRIPTION: This function sends error for all the pending requests/buffers
12172 *
12173 * PARAMETERS : None
12174 *
12175 * RETURN : Error codes
12176 * NO_ERROR on success
12177 *
12178 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // frameNum is the oldest frame still expecting a result. Buffers from
    // frames older than it already got their metadata, so they only need
    // ERROR_BUFFER; newer ones need a full ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
            frameNum);

    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // One ERROR_BUFFER notify per buffer, then return the buffer
                // itself with STATUS_ERROR in the capture result below.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            // NOTE(review): i is the head of mPendingRequestsList, which may
            // not be the request matching req->frame_number — this mirrors
            // the erase order below; confirm lists stay in lockstep.
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
12316
12317bool QCamera3HardwareInterface::isOnEncoder(
12318 const cam_dimension_t max_viewfinder_size,
12319 uint32_t width, uint32_t height)
12320{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012321 return ((width > (uint32_t)max_viewfinder_size.width) ||
12322 (height > (uint32_t)max_viewfinder_size.height) ||
12323 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12324 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012325}
12326
12327/*===========================================================================
12328 * FUNCTION : setBundleInfo
12329 *
12330 * DESCRIPTION: Set bundle info for all streams that are bundle.
12331 *
12332 * PARAMETERS : None
12333 *
12334 * RETURN : NO_ERROR on success
12335 * Error codes on failure
12336 *==========================================================================*/
12337int32_t QCamera3HardwareInterface::setBundleInfo()
12338{
12339 int32_t rc = NO_ERROR;
12340
12341 if (mChannelHandle) {
12342 cam_bundle_config_t bundleInfo;
12343 memset(&bundleInfo, 0, sizeof(bundleInfo));
12344 rc = mCameraHandle->ops->get_bundle_info(
12345 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12346 if (rc != NO_ERROR) {
12347 LOGE("get_bundle_info failed");
12348 return rc;
12349 }
12350 if (mAnalysisChannel) {
12351 mAnalysisChannel->setBundleInfo(bundleInfo);
12352 }
12353 if (mSupportChannel) {
12354 mSupportChannel->setBundleInfo(bundleInfo);
12355 }
12356 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12357 it != mStreamInfo.end(); it++) {
12358 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12359 channel->setBundleInfo(bundleInfo);
12360 }
12361 if (mRawDumpChannel) {
12362 mRawDumpChannel->setBundleInfo(bundleInfo);
12363 }
12364 }
12365
12366 return rc;
12367}
12368
12369/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012370 * FUNCTION : setInstantAEC
12371 *
12372 * DESCRIPTION: Set Instant AEC related params.
12373 *
12374 * PARAMETERS :
12375 * @meta: CameraMetadata reference
12376 *
12377 * RETURN : NO_ERROR on success
12378 * Error codes on failure
12379 *==========================================================================*/
int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
{
    int32_t rc = NO_ERROR;
    uint8_t val = 0;
    char prop[PROPERTY_VALUE_MAX];

    // First try to configure instant AEC from framework metadata
    if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
        val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
    }

    // If framework did not set this value, try to read from set prop.
    // Note: a framework value of 0 is indistinguishable from "not set" and
    // also falls through to the property.
    if (val == 0) {
        memset(prop, 0, sizeof(prop));
        property_get("persist.camera.instant.aec", prop, "0");
        val = (uint8_t)atoi(prop);
    }

    // Accept only values within the valid convergence-mode range.
    if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
           ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
        mInstantAEC = val;
        mInstantAECSettledFrameNumber = 0;
        mInstantAecFrameIdxCount = 0;
        LOGH("instantAEC value set %d",val);
        if (mInstantAEC) {
            // Cap on how many preview frames may be skipped while AEC settles.
            memset(prop, 0, sizeof(prop));
            property_get("persist.camera.ae.instant.bound", prop, "10");
            int32_t aec_frame_skip_cnt = atoi(prop);
            if (aec_frame_skip_cnt >= 0) {
                mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
            } else {
                LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
                rc = BAD_VALUE;
            }
        }
    } else {
        LOGE("Bad instant aec value set %d", val);
        rc = BAD_VALUE;
    }
    return rc;
}
12422
12423/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012424 * FUNCTION : get_num_overall_buffers
12425 *
12426 * DESCRIPTION: Estimate number of pending buffers across all requests.
12427 *
12428 * PARAMETERS : None
12429 *
12430 * RETURN : Number of overall pending buffers
12431 *
12432 *==========================================================================*/
12433uint32_t PendingBuffersMap::get_num_overall_buffers()
12434{
12435 uint32_t sum_buffers = 0;
12436 for (auto &req : mPendingBuffersInRequest) {
12437 sum_buffers += req.mPendingBufferList.size();
12438 }
12439 return sum_buffers;
12440}
12441
12442/*===========================================================================
12443 * FUNCTION : removeBuf
12444 *
12445 * DESCRIPTION: Remove a matching buffer from tracker.
12446 *
12447 * PARAMETERS : @buffer: image buffer for the callback
12448 *
12449 * RETURN : None
12450 *
12451 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Scan every pending request for the matching buffer handle. Both loops
    // break immediately after an erase, so the (now-repositioned) iterators
    // are never advanced past an erased element.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
12478
12479/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012480 * FUNCTION : getBufErrStatus
12481 *
12482 * DESCRIPTION: get buffer error status
12483 *
12484 * PARAMETERS : @buffer: buffer handle
12485 *
12486 * RETURN : Error status
12487 *
12488 *==========================================================================*/
12489int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12490{
12491 for (auto& req : mPendingBuffersInRequest) {
12492 for (auto& k : req.mPendingBufferList) {
12493 if (k.buffer == buffer)
12494 return k.bufStatus;
12495 }
12496 }
12497 return CAMERA3_BUFFER_STATUS_OK;
12498}
12499
12500/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012501 * FUNCTION : setPAAFSupport
12502 *
12503 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12504 * feature mask according to stream type and filter
12505 * arrangement
12506 *
12507 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12508 * @stream_type: stream type
12509 * @filter_arrangement: filter arrangement
12510 *
12511 * RETURN : None
12512 *==========================================================================*/
12513void QCamera3HardwareInterface::setPAAFSupport(
12514 cam_feature_mask_t& feature_mask,
12515 cam_stream_type_t stream_type,
12516 cam_color_filter_arrangement_t filter_arrangement)
12517{
12518 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12519 feature_mask, stream_type, filter_arrangement);
12520
12521 switch (filter_arrangement) {
12522 case CAM_FILTER_ARRANGEMENT_RGGB:
12523 case CAM_FILTER_ARRANGEMENT_GRBG:
12524 case CAM_FILTER_ARRANGEMENT_GBRG:
12525 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012526 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12527 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012528 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12529 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12530 }
12531 break;
12532 case CAM_FILTER_ARRANGEMENT_Y:
12533 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12534 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12535 }
12536 break;
12537 default:
12538 break;
12539 }
12540}
12541
12542/*===========================================================================
12543* FUNCTION : getSensorMountAngle
12544*
12545* DESCRIPTION: Retrieve sensor mount angle
12546*
12547* PARAMETERS : None
12548*
12549* RETURN : sensor mount angle in uint32_t
12550*==========================================================================*/
12551uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12552{
12553 return gCamCapability[mCameraId]->sensor_mount_angle;
12554}
12555
12556/*===========================================================================
12557* FUNCTION : getRelatedCalibrationData
12558*
12559* DESCRIPTION: Retrieve related system calibration data
12560*
12561* PARAMETERS : None
12562*
12563* RETURN : Pointer of related system calibration data
12564*==========================================================================*/
12565const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
12566{
12567 return (const cam_related_system_calibration_data_t *)
12568 &(gCamCapability[mCameraId]->related_cam_calibration);
12569}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012570
12571/*===========================================================================
12572 * FUNCTION : is60HzZone
12573 *
12574 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
12575 *
12576 * PARAMETERS : None
12577 *
12578 * RETURN : True if in 60Hz zone, False otherwise
12579 *==========================================================================*/
12580bool QCamera3HardwareInterface::is60HzZone()
12581{
12582 time_t t = time(NULL);
12583 struct tm lt;
12584
12585 struct tm* r = localtime_r(&t, &lt);
12586
12587 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
12588 return true;
12589 else
12590 return false;
12591}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070012592
12593/*===========================================================================
12594 * FUNCTION : adjustBlackLevelForCFA
12595 *
12596 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
12597 * of bayer CFA (Color Filter Array).
12598 *
12599 * PARAMETERS : @input: black level pattern in the order of RGGB
12600 * @output: black level pattern in the order of CFA
12601 * @color_arrangement: CFA color arrangement
12602 *
12603 * RETURN : None
12604 *==========================================================================*/
12605template<typename T>
12606void QCamera3HardwareInterface::adjustBlackLevelForCFA(
12607 T input[BLACK_LEVEL_PATTERN_CNT],
12608 T output[BLACK_LEVEL_PATTERN_CNT],
12609 cam_color_filter_arrangement_t color_arrangement)
12610{
12611 switch (color_arrangement) {
12612 case CAM_FILTER_ARRANGEMENT_GRBG:
12613 output[0] = input[1];
12614 output[1] = input[0];
12615 output[2] = input[3];
12616 output[3] = input[2];
12617 break;
12618 case CAM_FILTER_ARRANGEMENT_GBRG:
12619 output[0] = input[2];
12620 output[1] = input[3];
12621 output[2] = input[0];
12622 output[3] = input[1];
12623 break;
12624 case CAM_FILTER_ARRANGEMENT_BGGR:
12625 output[0] = input[3];
12626 output[1] = input[2];
12627 output[2] = input[1];
12628 output[3] = input[0];
12629 break;
12630 case CAM_FILTER_ARRANGEMENT_RGGB:
12631 output[0] = input[0];
12632 output[1] = input[1];
12633 output[2] = input[2];
12634 output[3] = input[3];
12635 break;
12636 default:
12637 LOGE("Invalid color arrangement to derive dynamic blacklevel");
12638 break;
12639 }
12640}
Thierry Strudel3d639192016-09-09 11:52:26 -070012641}; //end namespace qcamera