blob: 33c8859eb3fe1c3df35e92693c33ed670362660b [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
60extern "C" {
61#include "mm_camera_dbg.h"
62}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080063#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070064
65using namespace android;
66
67namespace qcamera {
68
// Convenience accessor into a QCamera memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Pipeline/result bookkeeping constants.
#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum pixel values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD video dimensions used to detect 4K recording sessions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Per-configuration stream-count limits.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Metering region tuple is (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a fixed-size mapping table (see *_MAP arrays below).
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Default post-processing feature superset advertised by HAL3.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel for condition waits that should never time out.
#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
121
// Per-camera capability records and cached static metadata, indexed by
// camera id; shared with other translation units (see externs below).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
// HAL3 logging verbosity; refreshed via getLogLevel() in the constructor.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// CDS (Chroma Down Sampling) property-string -> backend mode mapping.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR metadata enum -> backend video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Vendor IR metadata enum -> backend IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700148
// ANDROID_CONTROL_EFFECT_MODE_* -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// ANDROID_CONTROL_AWB_MODE_* -> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};
176
// ANDROID_CONTROL_SCENE_MODE_* -> backend scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // Android STEADYPHOTO maps onto the backend anti-shake mode.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// ANDROID_CONTROL_AF_MODE_* -> backend focus mode. Note: AF_MODE_OFF maps to
// two backend values (OFF and FIXED); forward lookup picks the first match.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
209
// ANDROID_COLOR_CORRECTION_ABERRATION_MODE_* -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// ANDROID_CONTROL_AE_ANTIBANDING_MODE_* -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// ANDROID_CONTROL_AE_MODE_* -> backend flash mode (AE mode implies flash
// policy: plain ON means flash stays off; REDEYE falls back to AUTO flash).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
239
// ANDROID_FLASH_MODE_* -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// ANDROID_STATISTICS_FACE_DETECT_MODE_* -> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

// Lens focus-distance calibration quality mapping.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// Lens state (moving/stationary) mapping for result metadata.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};
273
// Supported JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry means "no thumbnail", per the Android metadata contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// ANDROID_SENSOR_TEST_PATTERN_MODE_* -> backend test-pattern mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};
293
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// ANDROID_SENSOR_REFERENCE_ILLUMINANT1_* -> backend AWB illuminant.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested video fps -> backend HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
331
// Vendor instant-AEC mode -> backend AEC convergence type.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};
// camera3_device_ops vtable handed to the framework; entries left NULL
// (register_stream_buffers, get_metadata_vendor_tag_ops) are deprecated in
// the HAL3 versions this device reports.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize =                         QCamera3HardwareInterface::initialize,
    .configure_streams =                  QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers =            NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request =            QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops =        NULL,
    .dump =                               QCamera3HardwareInterface::dump,
    .flush =                              QCamera3HardwareInterface::flush,
    .reserved =                           {0},
};

// initialise to some default value
// NOTE(review): sized by its initializer (3 entries); presumably this must
// match MM_CAMERA_MAX_NUM_SENSORS — confirm against the header declaration.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
353
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to their idle defaults, fills in the camera3_device
 *              struct for the framework, reads debug/tuning properties and
 *              probes the GPU alignment helper library.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework callback table (torch/device status notify)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    // Publish the camera3_device struct the framework will use.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    // Condition variables for buffer/request flow control (mMutex guards them).
    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // TNR (temporal noise reduction) debug toggles; SW-TNR preview is on by
    // default ("1"), the others default off.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // Query the Adreno GPU's preferred surface stride alignment; fall back to
    // 32-pixel padding if the library or symbol is unavailable.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
498
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Unlinks any dual-cam
 *              bundle, stops every stream before deleting any channel, sends
 *              a final unconfigure to the backend, closes the camera and
 *              releases all per-session bookkeeping and sync primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        // gCamLock protects the shared sessionId table read below.
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            // Best effort: continue tearing down even if unlink fails.
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // Second pass: now that everything is stopped, delete the channels.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // Not deleted here: the picture channel is owned via mStreamInfo above.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drop all pending per-request bookkeeping and free owned allocations.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
660
661/*===========================================================================
662 * FUNCTION : erasePendingRequest
663 *
664 * DESCRIPTION: function to erase a desired pending request after freeing any
665 * allocated memory
666 *
667 * PARAMETERS :
668 * @i : iterator pointing to pending request to be erased
669 *
670 * RETURN : iterator pointing to the next request
671 *==========================================================================*/
672QCamera3HardwareInterface::pendingRequestIterator
673 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
674{
675 if (i->input_buffer != NULL) {
676 free(i->input_buffer);
677 i->input_buffer = NULL;
678 }
679 if (i->settings != NULL)
680 free_camera_metadata((camera_metadata_t*)i->settings);
681 return mPendingRequestsList.erase(i);
682}
683
684/*===========================================================================
685 * FUNCTION : camEvtHandle
686 *
687 * DESCRIPTION: Function registered to mm-camera-interface to handle events
688 *
689 * PARAMETERS :
690 * @camera_handle : interface layer camera handle
691 * @evt : ptr to event
692 * @user_data : user data ptr
693 *
694 * RETURN : none
695 *==========================================================================*/
696void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
697 mm_camera_event_t *evt,
698 void *user_data)
699{
700 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
701 if (obj && evt) {
702 switch(evt->server_event_type) {
703 case CAM_EVENT_TYPE_DAEMON_DIED:
704 pthread_mutex_lock(&obj->mMutex);
705 obj->mState = ERROR;
706 pthread_mutex_unlock(&obj->mMutex);
707 LOGE("Fatal, camera daemon died");
708 break;
709
710 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
711 LOGD("HAL got request pull from Daemon");
712 pthread_mutex_lock(&obj->mMutex);
713 obj->mWokenUpByDaemon = true;
714 obj->unblockRequestIfNecessary();
715 pthread_mutex_unlock(&obj->mMutex);
716 break;
717
718 default:
719 LOGW("Warning: Unhandled event %d",
720 evt->server_event_type);
721 break;
722 }
723 } else {
724 LOGE("NULL user_data/evt");
725 }
726}
727
728/*===========================================================================
729 * FUNCTION : openCamera
730 *
731 * DESCRIPTION: open camera
732 *
733 * PARAMETERS :
734 * @hw_device : double ptr for camera device struct
735 *
736 * RETURN : int32_t type of status
737 * NO_ERROR -- success
738 * none-zero failure code
739 *==========================================================================*/
740int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
741{
742 int rc = 0;
743 if (mState != CLOSED) {
744 *hw_device = NULL;
745 return PERMISSION_DENIED;
746 }
747
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800748 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700749 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
750 mCameraId);
751
752 rc = openCamera();
753 if (rc == 0) {
754 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800755 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700756 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800757 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700758
Thierry Strudel3d639192016-09-09 11:52:26 -0700759 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
760 mCameraId, rc);
761
762 if (rc == NO_ERROR) {
763 mState = OPENED;
764 }
765 return rc;
766}
767
768/*===========================================================================
769 * FUNCTION : openCamera
770 *
771 * DESCRIPTION: open camera
772 *
773 * PARAMETERS : none
774 *
775 * RETURN : int32_t type of status
776 * NO_ERROR -- success
777 * none-zero failure code
778 *==========================================================================*/
779int QCamera3HardwareInterface::openCamera()
780{
781 int rc = 0;
782 char value[PROPERTY_VALUE_MAX];
783
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800784 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700785 if (mCameraHandle) {
786 LOGE("Failure: Camera already opened");
787 return ALREADY_EXISTS;
788 }
789
790 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
791 if (rc < 0) {
792 LOGE("Failed to reserve flash for camera id: %d",
793 mCameraId);
794 return UNKNOWN_ERROR;
795 }
796
797 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
798 if (rc) {
799 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
800 return rc;
801 }
802
803 if (!mCameraHandle) {
804 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
805 return -ENODEV;
806 }
807
808 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
809 camEvtHandle, (void *)this);
810
811 if (rc < 0) {
812 LOGE("Error, failed to register event callback");
813 /* Not closing camera here since it is already handled in destructor */
814 return FAILED_TRANSACTION;
815 }
816
817 mExifParams.debug_params =
818 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
819 if (mExifParams.debug_params) {
820 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
821 } else {
822 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
823 return NO_MEMORY;
824 }
825 mFirstConfiguration = true;
826
827 //Notify display HAL that a camera session is active.
828 //But avoid calling the same during bootup because camera service might open/close
829 //cameras at boot time during its initialization and display service will also internally
830 //wait for camera service to initialize first while calling this display API, resulting in a
831 //deadlock situation. Since boot time camera open/close calls are made only to fetch
832 //capabilities, no need of this display bw optimization.
833 //Use "service.bootanim.exit" property to know boot status.
834 property_get("service.bootanim.exit", value, "0");
835 if (atoi(value) == 1) {
836 pthread_mutex_lock(&gCamLock);
837 if (gNumCameraSessions++ == 0) {
838 setCameraLaunchStatus(true);
839 }
840 pthread_mutex_unlock(&gCamLock);
841 }
842
843 //fill the session id needed while linking dual cam
844 pthread_mutex_lock(&gCamLock);
845 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
846 &sessionId[mCameraId]);
847 pthread_mutex_unlock(&gCamLock);
848
849 if (rc < 0) {
850 LOGE("Error, failed to get sessiion id");
851 return UNKNOWN_ERROR;
852 } else {
853 //Allocate related cam sync buffer
854 //this is needed for the payload that goes along with bundling cmd for related
855 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700856 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
857 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700858 if(rc != OK) {
859 rc = NO_MEMORY;
860 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
861 return NO_MEMORY;
862 }
863
864 //Map memory for related cam sync buffer
865 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700866 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
867 m_pDualCamCmdHeap->getFd(0),
868 sizeof(cam_dual_camera_cmd_info_t),
869 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700870 if(rc < 0) {
871 LOGE("Dualcam: failed to map Related cam sync buffer");
872 rc = FAILED_TRANSACTION;
873 return NO_MEMORY;
874 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700875 m_pDualCamCmdPtr =
876 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700877 }
878
879 LOGH("mCameraId=%d",mCameraId);
880
881 return NO_ERROR;
882}
883
884/*===========================================================================
885 * FUNCTION : closeCamera
886 *
887 * DESCRIPTION: close camera
888 *
889 * PARAMETERS : none
890 *
891 * RETURN : int32_t type of status
892 * NO_ERROR -- success
893 * none-zero failure code
894 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // Teardown order matters: the dual-cam command buffer must be
    // unmapped from the backend before its backing heap is freed, and
    // both before the camera handle itself is closed.
    // NOTE(review): mCameraHandle is dereferenced without a NULL check;
    // presumably callers only reach here from an opened state — confirm.
    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        // Counterpart of the increment in openCamera(); only the last
        // session to close clears the launch status.
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug exif scratch buffer allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Release the flash reservation taken in openCamera(); failure here
    // is only logged — the close itself still succeeds.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
947
948/*===========================================================================
949 * FUNCTION : initialize
950 *
951 * DESCRIPTION: Initialize frameworks callback functions
952 *
953 * PARAMETERS :
954 * @callback_ops : callback function to frameworks
955 *
956 * RETURN :
957 *
958 *==========================================================================*/
959int QCamera3HardwareInterface::initialize(
960 const struct camera3_callback_ops *callback_ops)
961{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800962 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700963 int rc;
964
965 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
966 pthread_mutex_lock(&mMutex);
967
968 // Validate current state
969 switch (mState) {
970 case OPENED:
971 /* valid state */
972 break;
973 default:
974 LOGE("Invalid state %d", mState);
975 rc = -ENODEV;
976 goto err1;
977 }
978
979 rc = initParameters();
980 if (rc < 0) {
981 LOGE("initParamters failed %d", rc);
982 goto err1;
983 }
984 mCallbackOps = callback_ops;
985
986 mChannelHandle = mCameraHandle->ops->add_channel(
987 mCameraHandle->camera_handle, NULL, NULL, this);
988 if (mChannelHandle == 0) {
989 LOGE("add_channel failed");
990 rc = -ENOMEM;
991 pthread_mutex_unlock(&mMutex);
992 return rc;
993 }
994
995 pthread_mutex_unlock(&mMutex);
996 mCameraInitialized = true;
997 mState = INITIALIZED;
998 LOGI("X");
999 return 0;
1000
1001err1:
1002 pthread_mutex_unlock(&mMutex);
1003 return rc;
1004}
1005
1006/*===========================================================================
1007 * FUNCTION : validateStreamDimensions
1008 *
1009 * DESCRIPTION: Check if the configuration requested are those advertised
1010 *
1011 * PARAMETERS :
1012 * @stream_list : streams to be configured
1013 *
1014 * RETURN :
1015 *
1016 *==========================================================================*/
1017int QCamera3HardwareInterface::validateStreamDimensions(
1018 camera3_stream_configuration_t *streamList)
1019{
1020 int rc = NO_ERROR;
1021 size_t count = 0;
1022
1023 camera3_stream_t *inputStream = NULL;
1024 /*
1025 * Loop through all streams to find input stream if it exists*
1026 */
1027 for (size_t i = 0; i< streamList->num_streams; i++) {
1028 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1029 if (inputStream != NULL) {
1030 LOGE("Error, Multiple input streams requested");
1031 return -EINVAL;
1032 }
1033 inputStream = streamList->streams[i];
1034 }
1035 }
1036 /*
1037 * Loop through all streams requested in configuration
1038 * Check if unsupported sizes have been requested on any of them
1039 */
1040 for (size_t j = 0; j < streamList->num_streams; j++) {
1041 bool sizeFound = false;
1042 camera3_stream_t *newStream = streamList->streams[j];
1043
1044 uint32_t rotatedHeight = newStream->height;
1045 uint32_t rotatedWidth = newStream->width;
1046 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1047 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1048 rotatedHeight = newStream->width;
1049 rotatedWidth = newStream->height;
1050 }
1051
1052 /*
1053 * Sizes are different for each type of stream format check against
1054 * appropriate table.
1055 */
1056 switch (newStream->format) {
1057 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1058 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1059 case HAL_PIXEL_FORMAT_RAW10:
1060 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1061 for (size_t i = 0; i < count; i++) {
1062 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1063 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1064 sizeFound = true;
1065 break;
1066 }
1067 }
1068 break;
1069 case HAL_PIXEL_FORMAT_BLOB:
1070 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1071 /* Verify set size against generated sizes table */
1072 for (size_t i = 0; i < count; i++) {
1073 if (((int32_t)rotatedWidth ==
1074 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1075 ((int32_t)rotatedHeight ==
1076 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1077 sizeFound = true;
1078 break;
1079 }
1080 }
1081 break;
1082 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1083 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1084 default:
1085 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1086 || newStream->stream_type == CAMERA3_STREAM_INPUT
1087 || IS_USAGE_ZSL(newStream->usage)) {
1088 if (((int32_t)rotatedWidth ==
1089 gCamCapability[mCameraId]->active_array_size.width) &&
1090 ((int32_t)rotatedHeight ==
1091 gCamCapability[mCameraId]->active_array_size.height)) {
1092 sizeFound = true;
1093 break;
1094 }
1095 /* We could potentially break here to enforce ZSL stream
1096 * set from frameworks always is full active array size
1097 * but it is not clear from the spc if framework will always
1098 * follow that, also we have logic to override to full array
1099 * size, so keeping the logic lenient at the moment
1100 */
1101 }
1102 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1103 MAX_SIZES_CNT);
1104 for (size_t i = 0; i < count; i++) {
1105 if (((int32_t)rotatedWidth ==
1106 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1107 ((int32_t)rotatedHeight ==
1108 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1109 sizeFound = true;
1110 break;
1111 }
1112 }
1113 break;
1114 } /* End of switch(newStream->format) */
1115
1116 /* We error out even if a single stream has unsupported size set */
1117 if (!sizeFound) {
1118 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1119 rotatedWidth, rotatedHeight, newStream->format,
1120 gCamCapability[mCameraId]->active_array_size.width,
1121 gCamCapability[mCameraId]->active_array_size.height);
1122 rc = -EINVAL;
1123 break;
1124 }
1125 } /* End of for each stream */
1126 return rc;
1127}
1128
1129/*==============================================================================
1130 * FUNCTION : isSupportChannelNeeded
1131 *
1132 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1133 *
1134 * PARAMETERS :
1135 * @stream_list : streams to be configured
1136 * @stream_config_info : the config info for streams to be configured
1137 *
 * RETURN : Boolean true/false decision
1139 *
1140 *==========================================================================*/
1141bool QCamera3HardwareInterface::isSupportChannelNeeded(
1142 camera3_stream_configuration_t *streamList,
1143 cam_stream_size_info_t stream_config_info)
1144{
1145 uint32_t i;
1146 bool pprocRequested = false;
1147 /* Check for conditions where PProc pipeline does not have any streams*/
1148 for (i = 0; i < stream_config_info.num_streams; i++) {
1149 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1150 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1151 pprocRequested = true;
1152 break;
1153 }
1154 }
1155
1156 if (pprocRequested == false )
1157 return true;
1158
1159 /* Dummy stream needed if only raw or jpeg streams present */
1160 for (i = 0; i < streamList->num_streams; i++) {
1161 switch(streamList->streams[i]->format) {
1162 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1163 case HAL_PIXEL_FORMAT_RAW10:
1164 case HAL_PIXEL_FORMAT_RAW16:
1165 case HAL_PIXEL_FORMAT_BLOB:
1166 break;
1167 default:
1168 return false;
1169 }
1170 }
1171 return true;
1172}
1173
1174/*==============================================================================
1175 * FUNCTION : getSensorOutputSize
1176 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1178 *
1179 * PARAMETERS :
1180 * @sensor_dim : sensor output dimension (output)
1181 *
1182 * RETURN : int32_t type of status
1183 * NO_ERROR -- success
1184 * none-zero failure code
1185 *
1186 *==========================================================================*/
1187int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1188{
1189 int32_t rc = NO_ERROR;
1190
1191 cam_dimension_t max_dim = {0, 0};
1192 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1193 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1194 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1195 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1196 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1197 }
1198
1199 clear_metadata_buffer(mParameters);
1200
1201 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1202 max_dim);
1203 if (rc != NO_ERROR) {
1204 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1205 return rc;
1206 }
1207
1208 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1209 if (rc != NO_ERROR) {
1210 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1211 return rc;
1212 }
1213
1214 clear_metadata_buffer(mParameters);
1215 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1216
1217 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1218 mParameters);
1219 if (rc != NO_ERROR) {
1220 LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1221 return rc;
1222 }
1223
1224 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1225 LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1226
1227 return rc;
1228}
1229
1230/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001231 * FUNCTION : addToPPFeatureMask
1232 *
1233 * DESCRIPTION: add additional features to pp feature mask based on
1234 * stream type and usecase
1235 *
1236 * PARAMETERS :
1237 * @stream_format : stream type for feature mask
1238 * @stream_idx : stream idx within postprocess_mask list to change
1239 *
1240 * RETURN : NULL
1241 *
1242 *==========================================================================*/
1243void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1244 uint32_t stream_idx)
1245{
1246 char feature_mask_value[PROPERTY_VALUE_MAX];
1247 cam_feature_mask_t feature_mask;
1248 int args_converted;
1249 int property_len;
1250
1251 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001252#ifdef _LE_CAMERA_
1253 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1254 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1255 property_len = property_get("persist.camera.hal3.feature",
1256 feature_mask_value, swtnr_feature_mask_value);
1257#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001258 property_len = property_get("persist.camera.hal3.feature",
1259 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001260#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001261 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1262 (feature_mask_value[1] == 'x')) {
1263 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1264 } else {
1265 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1266 }
1267 if (1 != args_converted) {
1268 feature_mask = 0;
1269 LOGE("Wrong feature mask %s", feature_mask_value);
1270 return;
1271 }
1272
1273 switch (stream_format) {
1274 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1275 /* Add LLVD to pp feature mask only if video hint is enabled */
1276 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1277 mStreamConfigInfo.postprocess_mask[stream_idx]
1278 |= CAM_QTI_FEATURE_SW_TNR;
1279 LOGH("Added SW TNR to pp feature mask");
1280 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1281 mStreamConfigInfo.postprocess_mask[stream_idx]
1282 |= CAM_QCOM_FEATURE_LLVD;
1283 LOGH("Added LLVD SeeMore to pp feature mask");
1284 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001285 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1286 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1287 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1288 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001289 break;
1290 }
1291 default:
1292 break;
1293 }
1294 LOGD("PP feature mask %llx",
1295 mStreamConfigInfo.postprocess_mask[stream_idx]);
1296}
1297
1298/*==============================================================================
1299 * FUNCTION : updateFpsInPreviewBuffer
1300 *
1301 * DESCRIPTION: update FPS information in preview buffer.
1302 *
1303 * PARAMETERS :
1304 * @metadata : pointer to metadata buffer
1305 * @frame_number: frame_number to look for in pending buffer list
1306 *
1307 * RETURN : None
1308 *
1309 *==========================================================================*/
1310void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1311 uint32_t frame_number)
1312{
1313 // Mark all pending buffers for this particular request
1314 // with corresponding framerate information
1315 for (List<PendingBuffersInRequest>::iterator req =
1316 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1317 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1318 for(List<PendingBufferInfo>::iterator j =
1319 req->mPendingBufferList.begin();
1320 j != req->mPendingBufferList.end(); j++) {
1321 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1322 if ((req->frame_number == frame_number) &&
1323 (channel->getStreamTypeMask() &
1324 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1325 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1326 CAM_INTF_PARM_FPS_RANGE, metadata) {
1327 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1328 struct private_handle_t *priv_handle =
1329 (struct private_handle_t *)(*(j->buffer));
1330 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1331 }
1332 }
1333 }
1334 }
1335}
1336
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001337/*==============================================================================
1338 * FUNCTION : updateTimeStampInPendingBuffers
1339 *
1340 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1341 * of a frame number
1342 *
1343 * PARAMETERS :
1344 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1345 * @timestamp : timestamp to be set
1346 *
1347 * RETURN : None
1348 *
1349 *==========================================================================*/
1350void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1351 uint32_t frameNumber, nsecs_t timestamp)
1352{
1353 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1354 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1355 if (req->frame_number != frameNumber)
1356 continue;
1357
1358 for (auto k = req->mPendingBufferList.begin();
1359 k != req->mPendingBufferList.end(); k++ ) {
1360 struct private_handle_t *priv_handle =
1361 (struct private_handle_t *) (*(k->buffer));
1362 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1363 }
1364 }
1365 return;
1366}
1367
Thierry Strudel3d639192016-09-09 11:52:26 -07001368/*===========================================================================
1369 * FUNCTION : configureStreams
1370 *
1371 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1372 * and output streams.
1373 *
1374 * PARAMETERS :
1375 * @stream_list : streams to be configured
1376 *
1377 * RETURN :
1378 *
1379 *==========================================================================*/
1380int QCamera3HardwareInterface::configureStreams(
1381 camera3_stream_configuration_t *streamList)
1382{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001383 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001384 int rc = 0;
1385
1386 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001387 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001388 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001389 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001390
1391 return rc;
1392}
1393
1394/*===========================================================================
1395 * FUNCTION : configureStreamsPerfLocked
1396 *
1397 * DESCRIPTION: configureStreams while perfLock is held.
1398 *
1399 * PARAMETERS :
1400 * @stream_list : streams to be configured
1401 *
1402 * RETURN : int32_t type of status
1403 * NO_ERROR -- success
1404 * none-zero failure code
1405 *==========================================================================*/
1406int QCamera3HardwareInterface::configureStreamsPerfLocked(
1407 camera3_stream_configuration_t *streamList)
1408{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001409 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001410 int rc = 0;
1411
1412 // Sanity check stream_list
1413 if (streamList == NULL) {
1414 LOGE("NULL stream configuration");
1415 return BAD_VALUE;
1416 }
1417 if (streamList->streams == NULL) {
1418 LOGE("NULL stream list");
1419 return BAD_VALUE;
1420 }
1421
1422 if (streamList->num_streams < 1) {
1423 LOGE("Bad number of streams requested: %d",
1424 streamList->num_streams);
1425 return BAD_VALUE;
1426 }
1427
1428 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1429 LOGE("Maximum number of streams %d exceeded: %d",
1430 MAX_NUM_STREAMS, streamList->num_streams);
1431 return BAD_VALUE;
1432 }
1433
1434 mOpMode = streamList->operation_mode;
1435 LOGD("mOpMode: %d", mOpMode);
1436
1437 /* first invalidate all the steams in the mStreamList
1438 * if they appear again, they will be validated */
1439 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1440 it != mStreamInfo.end(); it++) {
1441 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1442 if (channel) {
1443 channel->stop();
1444 }
1445 (*it)->status = INVALID;
1446 }
1447
1448 if (mRawDumpChannel) {
1449 mRawDumpChannel->stop();
1450 delete mRawDumpChannel;
1451 mRawDumpChannel = NULL;
1452 }
1453
1454 if (mSupportChannel)
1455 mSupportChannel->stop();
1456
1457 if (mAnalysisChannel) {
1458 mAnalysisChannel->stop();
1459 }
1460 if (mMetadataChannel) {
1461 /* If content of mStreamInfo is not 0, there is metadata stream */
1462 mMetadataChannel->stop();
1463 }
1464 if (mChannelHandle) {
1465 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1466 mChannelHandle);
1467 LOGD("stopping channel %d", mChannelHandle);
1468 }
1469
1470 pthread_mutex_lock(&mMutex);
1471
1472 // Check state
1473 switch (mState) {
1474 case INITIALIZED:
1475 case CONFIGURED:
1476 case STARTED:
1477 /* valid state */
1478 break;
1479 default:
1480 LOGE("Invalid state %d", mState);
1481 pthread_mutex_unlock(&mMutex);
1482 return -ENODEV;
1483 }
1484
1485 /* Check whether we have video stream */
1486 m_bIs4KVideo = false;
1487 m_bIsVideo = false;
1488 m_bEisSupportedSize = false;
1489 m_bTnrEnabled = false;
1490 bool isZsl = false;
1491 uint32_t videoWidth = 0U;
1492 uint32_t videoHeight = 0U;
1493 size_t rawStreamCnt = 0;
1494 size_t stallStreamCnt = 0;
1495 size_t processedStreamCnt = 0;
1496 // Number of streams on ISP encoder path
1497 size_t numStreamsOnEncoder = 0;
1498 size_t numYuv888OnEncoder = 0;
1499 bool bYuv888OverrideJpeg = false;
1500 cam_dimension_t largeYuv888Size = {0, 0};
1501 cam_dimension_t maxViewfinderSize = {0, 0};
1502 bool bJpegExceeds4K = false;
1503 bool bJpegOnEncoder = false;
1504 bool bUseCommonFeatureMask = false;
1505 cam_feature_mask_t commonFeatureMask = 0;
1506 bool bSmallJpegSize = false;
1507 uint32_t width_ratio;
1508 uint32_t height_ratio;
1509 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1510 camera3_stream_t *inputStream = NULL;
1511 bool isJpeg = false;
1512 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001513 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001514
1515 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1516
1517 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001518 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001519 uint8_t eis_prop_set;
1520 uint32_t maxEisWidth = 0;
1521 uint32_t maxEisHeight = 0;
1522
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001523 // Initialize all instant AEC related variables
1524 mInstantAEC = false;
1525 mResetInstantAEC = false;
1526 mInstantAECSettledFrameNumber = 0;
1527 mAecSkipDisplayFrameBound = 0;
1528 mInstantAecFrameIdxCount = 0;
1529
Thierry Strudel3d639192016-09-09 11:52:26 -07001530 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1531
1532 size_t count = IS_TYPE_MAX;
1533 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1534 for (size_t i = 0; i < count; i++) {
1535 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001536 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1537 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001538 break;
1539 }
1540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001541 count = CAM_OPT_STAB_MAX;
1542 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1543 for (size_t i = 0; i < count; i++) {
1544 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1545 oisSupported = true;
1546 break;
1547 }
1548 }
1549
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001550 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001551 maxEisWidth = MAX_EIS_WIDTH;
1552 maxEisHeight = MAX_EIS_HEIGHT;
1553 }
1554
1555 /* EIS setprop control */
1556 char eis_prop[PROPERTY_VALUE_MAX];
1557 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001558 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001559 eis_prop_set = (uint8_t)atoi(eis_prop);
1560
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001561 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001562 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1563
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001564 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1565 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1566
Thierry Strudel3d639192016-09-09 11:52:26 -07001567 /* stream configurations */
1568 for (size_t i = 0; i < streamList->num_streams; i++) {
1569 camera3_stream_t *newStream = streamList->streams[i];
1570 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1571 "height = %d, rotation = %d, usage = 0x%x",
1572 i, newStream->stream_type, newStream->format,
1573 newStream->width, newStream->height, newStream->rotation,
1574 newStream->usage);
1575 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1576 newStream->stream_type == CAMERA3_STREAM_INPUT){
1577 isZsl = true;
1578 }
1579 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1580 inputStream = newStream;
1581 }
1582
1583 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1584 isJpeg = true;
1585 jpegSize.width = newStream->width;
1586 jpegSize.height = newStream->height;
1587 if (newStream->width > VIDEO_4K_WIDTH ||
1588 newStream->height > VIDEO_4K_HEIGHT)
1589 bJpegExceeds4K = true;
1590 }
1591
1592 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1593 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1594 m_bIsVideo = true;
1595 videoWidth = newStream->width;
1596 videoHeight = newStream->height;
1597 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1598 (VIDEO_4K_HEIGHT <= newStream->height)) {
1599 m_bIs4KVideo = true;
1600 }
1601 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1602 (newStream->height <= maxEisHeight);
1603 }
1604 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1605 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1606 switch (newStream->format) {
1607 case HAL_PIXEL_FORMAT_BLOB:
1608 stallStreamCnt++;
1609 if (isOnEncoder(maxViewfinderSize, newStream->width,
1610 newStream->height)) {
1611 numStreamsOnEncoder++;
1612 bJpegOnEncoder = true;
1613 }
1614 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1615 newStream->width);
1616 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1617 newStream->height);;
1618 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1619 "FATAL: max_downscale_factor cannot be zero and so assert");
1620 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1621 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1622 LOGH("Setting small jpeg size flag to true");
1623 bSmallJpegSize = true;
1624 }
1625 break;
1626 case HAL_PIXEL_FORMAT_RAW10:
1627 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1628 case HAL_PIXEL_FORMAT_RAW16:
1629 rawStreamCnt++;
1630 break;
1631 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1632 processedStreamCnt++;
1633 if (isOnEncoder(maxViewfinderSize, newStream->width,
1634 newStream->height)) {
1635 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1636 !IS_USAGE_ZSL(newStream->usage)) {
1637 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1638 }
1639 numStreamsOnEncoder++;
1640 }
1641 break;
1642 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1643 processedStreamCnt++;
1644 if (isOnEncoder(maxViewfinderSize, newStream->width,
1645 newStream->height)) {
1646 // If Yuv888 size is not greater than 4K, set feature mask
1647 // to SUPERSET so that it support concurrent request on
1648 // YUV and JPEG.
1649 if (newStream->width <= VIDEO_4K_WIDTH &&
1650 newStream->height <= VIDEO_4K_HEIGHT) {
1651 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1652 }
1653 numStreamsOnEncoder++;
1654 numYuv888OnEncoder++;
1655 largeYuv888Size.width = newStream->width;
1656 largeYuv888Size.height = newStream->height;
1657 }
1658 break;
1659 default:
1660 processedStreamCnt++;
1661 if (isOnEncoder(maxViewfinderSize, newStream->width,
1662 newStream->height)) {
1663 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1664 numStreamsOnEncoder++;
1665 }
1666 break;
1667 }
1668
1669 }
1670 }
1671
1672 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1673 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1674 !m_bIsVideo) {
1675 m_bEisEnable = false;
1676 }
1677
1678 /* Logic to enable/disable TNR based on specific config size/etc.*/
1679 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1680 ((videoWidth == 1920 && videoHeight == 1080) ||
1681 (videoWidth == 1280 && videoHeight == 720)) &&
1682 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1683 m_bTnrEnabled = true;
1684
1685 /* Check if num_streams is sane */
1686 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1687 rawStreamCnt > MAX_RAW_STREAMS ||
1688 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1689 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1690 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1691 pthread_mutex_unlock(&mMutex);
1692 return -EINVAL;
1693 }
1694 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001695 if (isZsl && m_bIs4KVideo) {
1696 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001697 pthread_mutex_unlock(&mMutex);
1698 return -EINVAL;
1699 }
1700 /* Check if stream sizes are sane */
1701 if (numStreamsOnEncoder > 2) {
1702 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1703 pthread_mutex_unlock(&mMutex);
1704 return -EINVAL;
1705 } else if (1 < numStreamsOnEncoder){
1706 bUseCommonFeatureMask = true;
1707 LOGH("Multiple streams above max viewfinder size, common mask needed");
1708 }
1709
1710 /* Check if BLOB size is greater than 4k in 4k recording case */
1711 if (m_bIs4KVideo && bJpegExceeds4K) {
1712 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1713 pthread_mutex_unlock(&mMutex);
1714 return -EINVAL;
1715 }
1716
1717 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1718 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1719 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1720 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1721 // configurations:
1722 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1723 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1724 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1725 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1726 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1727 __func__);
1728 pthread_mutex_unlock(&mMutex);
1729 return -EINVAL;
1730 }
1731
1732 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1733 // the YUV stream's size is greater or equal to the JPEG size, set common
1734 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1735 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1736 jpegSize.width, jpegSize.height) &&
1737 largeYuv888Size.width > jpegSize.width &&
1738 largeYuv888Size.height > jpegSize.height) {
1739 bYuv888OverrideJpeg = true;
1740 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1741 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1742 }
1743
1744 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1745 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1746 commonFeatureMask);
1747 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1748 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1749
1750 rc = validateStreamDimensions(streamList);
1751 if (rc == NO_ERROR) {
1752 rc = validateStreamRotations(streamList);
1753 }
1754 if (rc != NO_ERROR) {
1755 LOGE("Invalid stream configuration requested!");
1756 pthread_mutex_unlock(&mMutex);
1757 return rc;
1758 }
1759
1760 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1761 for (size_t i = 0; i < streamList->num_streams; i++) {
1762 camera3_stream_t *newStream = streamList->streams[i];
1763 LOGH("newStream type = %d, stream format = %d "
1764 "stream size : %d x %d, stream rotation = %d",
1765 newStream->stream_type, newStream->format,
1766 newStream->width, newStream->height, newStream->rotation);
1767 //if the stream is in the mStreamList validate it
1768 bool stream_exists = false;
1769 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1770 it != mStreamInfo.end(); it++) {
1771 if ((*it)->stream == newStream) {
1772 QCamera3ProcessingChannel *channel =
1773 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1774 stream_exists = true;
1775 if (channel)
1776 delete channel;
1777 (*it)->status = VALID;
1778 (*it)->stream->priv = NULL;
1779 (*it)->channel = NULL;
1780 }
1781 }
1782 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1783 //new stream
1784 stream_info_t* stream_info;
1785 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1786 if (!stream_info) {
1787 LOGE("Could not allocate stream info");
1788 rc = -ENOMEM;
1789 pthread_mutex_unlock(&mMutex);
1790 return rc;
1791 }
1792 stream_info->stream = newStream;
1793 stream_info->status = VALID;
1794 stream_info->channel = NULL;
1795 mStreamInfo.push_back(stream_info);
1796 }
1797 /* Covers Opaque ZSL and API1 F/W ZSL */
1798 if (IS_USAGE_ZSL(newStream->usage)
1799 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1800 if (zslStream != NULL) {
1801 LOGE("Multiple input/reprocess streams requested!");
1802 pthread_mutex_unlock(&mMutex);
1803 return BAD_VALUE;
1804 }
1805 zslStream = newStream;
1806 }
1807 /* Covers YUV reprocess */
1808 if (inputStream != NULL) {
1809 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1810 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1811 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1812 && inputStream->width == newStream->width
1813 && inputStream->height == newStream->height) {
1814 if (zslStream != NULL) {
1815 /* This scenario indicates multiple YUV streams with same size
1816 * as input stream have been requested, since zsl stream handle
1817 * is solely use for the purpose of overriding the size of streams
1818 * which share h/w streams we will just make a guess here as to
1819 * which of the stream is a ZSL stream, this will be refactored
1820 * once we make generic logic for streams sharing encoder output
1821 */
1822 LOGH("Warning, Multiple ip/reprocess streams requested!");
1823 }
1824 zslStream = newStream;
1825 }
1826 }
1827 }
1828
1829 /* If a zsl stream is set, we know that we have configured at least one input or
1830 bidirectional stream */
1831 if (NULL != zslStream) {
1832 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1833 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1834 mInputStreamInfo.format = zslStream->format;
1835 mInputStreamInfo.usage = zslStream->usage;
1836 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1837 mInputStreamInfo.dim.width,
1838 mInputStreamInfo.dim.height,
1839 mInputStreamInfo.format, mInputStreamInfo.usage);
1840 }
1841
1842 cleanAndSortStreamInfo();
1843 if (mMetadataChannel) {
1844 delete mMetadataChannel;
1845 mMetadataChannel = NULL;
1846 }
1847 if (mSupportChannel) {
1848 delete mSupportChannel;
1849 mSupportChannel = NULL;
1850 }
1851
1852 if (mAnalysisChannel) {
1853 delete mAnalysisChannel;
1854 mAnalysisChannel = NULL;
1855 }
1856
1857 if (mDummyBatchChannel) {
1858 delete mDummyBatchChannel;
1859 mDummyBatchChannel = NULL;
1860 }
1861
1862 //Create metadata channel and initialize it
1863 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1864 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1865 gCamCapability[mCameraId]->color_arrangement);
1866 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1867 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001868 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001869 if (mMetadataChannel == NULL) {
1870 LOGE("failed to allocate metadata channel");
1871 rc = -ENOMEM;
1872 pthread_mutex_unlock(&mMutex);
1873 return rc;
1874 }
1875 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1876 if (rc < 0) {
1877 LOGE("metadata channel initialization failed");
1878 delete mMetadataChannel;
1879 mMetadataChannel = NULL;
1880 pthread_mutex_unlock(&mMutex);
1881 return rc;
1882 }
1883
Thierry Strudel3d639192016-09-09 11:52:26 -07001884 bool isRawStreamRequested = false;
1885 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1886 /* Allocate channel objects for the requested streams */
1887 for (size_t i = 0; i < streamList->num_streams; i++) {
1888 camera3_stream_t *newStream = streamList->streams[i];
1889 uint32_t stream_usage = newStream->usage;
1890 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1891 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1892 struct camera_info *p_info = NULL;
1893 pthread_mutex_lock(&gCamLock);
1894 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1895 pthread_mutex_unlock(&gCamLock);
1896 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1897 || IS_USAGE_ZSL(newStream->usage)) &&
1898 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1899 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1900 if (bUseCommonFeatureMask) {
1901 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1902 commonFeatureMask;
1903 } else {
1904 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1905 CAM_QCOM_FEATURE_NONE;
1906 }
1907
1908 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1909 LOGH("Input stream configured, reprocess config");
1910 } else {
1911 //for non zsl streams find out the format
1912 switch (newStream->format) {
1913 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1914 {
1915 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1916 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1917 /* add additional features to pp feature mask */
1918 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1919 mStreamConfigInfo.num_streams);
1920
1921 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1922 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1923 CAM_STREAM_TYPE_VIDEO;
1924 if (m_bTnrEnabled && m_bTnrVideo) {
1925 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1926 CAM_QCOM_FEATURE_CPP_TNR;
1927 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1928 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1929 ~CAM_QCOM_FEATURE_CDS;
1930 }
1931 } else {
1932 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1933 CAM_STREAM_TYPE_PREVIEW;
1934 if (m_bTnrEnabled && m_bTnrPreview) {
1935 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1936 CAM_QCOM_FEATURE_CPP_TNR;
1937 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1938 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1939 ~CAM_QCOM_FEATURE_CDS;
1940 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001941 if(!m_bSwTnrPreview) {
1942 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1943 ~CAM_QTI_FEATURE_SW_TNR;
1944 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001945 padding_info.width_padding = mSurfaceStridePadding;
1946 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001947 previewSize.width = (int32_t)newStream->width;
1948 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 }
1950 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1951 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1952 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1953 newStream->height;
1954 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1955 newStream->width;
1956 }
1957 }
1958 break;
1959 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1960 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1961 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1962 if (bUseCommonFeatureMask)
1963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1964 commonFeatureMask;
1965 else
1966 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1967 CAM_QCOM_FEATURE_NONE;
1968 } else {
1969 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1970 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1971 }
1972 break;
1973 case HAL_PIXEL_FORMAT_BLOB:
1974 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1975 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1976 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1977 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1978 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1979 } else {
1980 if (bUseCommonFeatureMask &&
1981 isOnEncoder(maxViewfinderSize, newStream->width,
1982 newStream->height)) {
1983 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1984 } else {
1985 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1986 }
1987 }
1988 if (isZsl) {
1989 if (zslStream) {
1990 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1991 (int32_t)zslStream->width;
1992 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1993 (int32_t)zslStream->height;
1994 } else {
1995 LOGE("Error, No ZSL stream identified");
1996 pthread_mutex_unlock(&mMutex);
1997 return -EINVAL;
1998 }
1999 } else if (m_bIs4KVideo) {
2000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2001 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2002 } else if (bYuv888OverrideJpeg) {
2003 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2004 (int32_t)largeYuv888Size.width;
2005 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2006 (int32_t)largeYuv888Size.height;
2007 }
2008 break;
2009 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2010 case HAL_PIXEL_FORMAT_RAW16:
2011 case HAL_PIXEL_FORMAT_RAW10:
2012 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2013 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2014 isRawStreamRequested = true;
2015 break;
2016 default:
2017 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2018 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2019 break;
2020 }
2021 }
2022
2023 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2024 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2025 gCamCapability[mCameraId]->color_arrangement);
2026
2027 if (newStream->priv == NULL) {
2028 //New stream, construct channel
2029 switch (newStream->stream_type) {
2030 case CAMERA3_STREAM_INPUT:
2031 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2032 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2033 break;
2034 case CAMERA3_STREAM_BIDIRECTIONAL:
2035 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2036 GRALLOC_USAGE_HW_CAMERA_WRITE;
2037 break;
2038 case CAMERA3_STREAM_OUTPUT:
2039 /* For video encoding stream, set read/write rarely
2040 * flag so that they may be set to un-cached */
2041 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2042 newStream->usage |=
2043 (GRALLOC_USAGE_SW_READ_RARELY |
2044 GRALLOC_USAGE_SW_WRITE_RARELY |
2045 GRALLOC_USAGE_HW_CAMERA_WRITE);
2046 else if (IS_USAGE_ZSL(newStream->usage))
2047 {
2048 LOGD("ZSL usage flag skipping");
2049 }
2050 else if (newStream == zslStream
2051 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2052 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2053 } else
2054 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2055 break;
2056 default:
2057 LOGE("Invalid stream_type %d", newStream->stream_type);
2058 break;
2059 }
2060
2061 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2062 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2063 QCamera3ProcessingChannel *channel = NULL;
2064 switch (newStream->format) {
2065 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2066 if ((newStream->usage &
2067 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2068 (streamList->operation_mode ==
2069 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2070 ) {
2071 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2072 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002073 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002074 this,
2075 newStream,
2076 (cam_stream_type_t)
2077 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2078 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2079 mMetadataChannel,
2080 0); //heap buffers are not required for HFR video channel
2081 if (channel == NULL) {
2082 LOGE("allocation of channel failed");
2083 pthread_mutex_unlock(&mMutex);
2084 return -ENOMEM;
2085 }
2086 //channel->getNumBuffers() will return 0 here so use
2087 //MAX_INFLIGH_HFR_REQUESTS
2088 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2089 newStream->priv = channel;
2090 LOGI("num video buffers in HFR mode: %d",
2091 MAX_INFLIGHT_HFR_REQUESTS);
2092 } else {
2093 /* Copy stream contents in HFR preview only case to create
2094 * dummy batch channel so that sensor streaming is in
2095 * HFR mode */
2096 if (!m_bIsVideo && (streamList->operation_mode ==
2097 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2098 mDummyBatchStream = *newStream;
2099 }
2100 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2101 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002102 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 this,
2104 newStream,
2105 (cam_stream_type_t)
2106 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2107 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2108 mMetadataChannel,
2109 MAX_INFLIGHT_REQUESTS);
2110 if (channel == NULL) {
2111 LOGE("allocation of channel failed");
2112 pthread_mutex_unlock(&mMutex);
2113 return -ENOMEM;
2114 }
2115 newStream->max_buffers = channel->getNumBuffers();
2116 newStream->priv = channel;
2117 }
2118 break;
2119 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2120 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2121 mChannelHandle,
2122 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002123 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002124 this,
2125 newStream,
2126 (cam_stream_type_t)
2127 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2128 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2129 mMetadataChannel);
2130 if (channel == NULL) {
2131 LOGE("allocation of YUV channel failed");
2132 pthread_mutex_unlock(&mMutex);
2133 return -ENOMEM;
2134 }
2135 newStream->max_buffers = channel->getNumBuffers();
2136 newStream->priv = channel;
2137 break;
2138 }
2139 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2140 case HAL_PIXEL_FORMAT_RAW16:
2141 case HAL_PIXEL_FORMAT_RAW10:
2142 mRawChannel = new QCamera3RawChannel(
2143 mCameraHandle->camera_handle, mChannelHandle,
2144 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002145 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002146 this, newStream,
2147 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2148 mMetadataChannel,
2149 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2150 if (mRawChannel == NULL) {
2151 LOGE("allocation of raw channel failed");
2152 pthread_mutex_unlock(&mMutex);
2153 return -ENOMEM;
2154 }
2155 newStream->max_buffers = mRawChannel->getNumBuffers();
2156 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2157 break;
2158 case HAL_PIXEL_FORMAT_BLOB:
2159 // Max live snapshot inflight buffer is 1. This is to mitigate
2160 // frame drop issues for video snapshot. The more buffers being
2161 // allocated, the more frame drops there are.
2162 mPictureChannel = new QCamera3PicChannel(
2163 mCameraHandle->camera_handle, mChannelHandle,
2164 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002165 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002166 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2167 m_bIs4KVideo, isZsl, mMetadataChannel,
2168 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2169 if (mPictureChannel == NULL) {
2170 LOGE("allocation of channel failed");
2171 pthread_mutex_unlock(&mMutex);
2172 return -ENOMEM;
2173 }
2174 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2175 newStream->max_buffers = mPictureChannel->getNumBuffers();
2176 mPictureChannel->overrideYuvSize(
2177 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2178 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2179 break;
2180
2181 default:
2182 LOGE("not a supported format 0x%x", newStream->format);
2183 break;
2184 }
2185 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2186 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2187 } else {
2188 LOGE("Error, Unknown stream type");
2189 pthread_mutex_unlock(&mMutex);
2190 return -EINVAL;
2191 }
2192
2193 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2194 if (channel != NULL && channel->isUBWCEnabled()) {
2195 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002196 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2197 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002198 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2199 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2200 }
2201 }
2202
2203 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2204 it != mStreamInfo.end(); it++) {
2205 if ((*it)->stream == newStream) {
2206 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2207 break;
2208 }
2209 }
2210 } else {
2211 // Channel already exists for this stream
2212 // Do nothing for now
2213 }
2214 padding_info = gCamCapability[mCameraId]->padding_info;
2215
2216 /* Do not add entries for input stream in metastream info
2217 * since there is no real stream associated with it
2218 */
2219 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2220 mStreamConfigInfo.num_streams++;
2221 }
2222
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002223 // Create analysis stream all the time, even when h/w support is not available
2224 {
2225 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2226 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2227 gCamCapability[mCameraId]->color_arrangement);
2228 cam_analysis_info_t analysisInfo;
2229 int32_t ret = NO_ERROR;
2230 ret = mCommon.getAnalysisInfo(
2231 FALSE,
2232 analysisFeatureMask,
2233 &analysisInfo);
2234 if (ret == NO_ERROR) {
2235 cam_dimension_t analysisDim;
2236 analysisDim = mCommon.getMatchingDimension(previewSize,
2237 analysisInfo.analysis_recommended_res);
2238
2239 mAnalysisChannel = new QCamera3SupportChannel(
2240 mCameraHandle->camera_handle,
2241 mChannelHandle,
2242 mCameraHandle->ops,
2243 &analysisInfo.analysis_padding_info,
2244 analysisFeatureMask,
2245 CAM_STREAM_TYPE_ANALYSIS,
2246 &analysisDim,
2247 (analysisInfo.analysis_format
2248 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2249 : CAM_FORMAT_YUV_420_NV21),
2250 analysisInfo.hw_analysis_supported,
2251 gCamCapability[mCameraId]->color_arrangement,
2252 this,
2253 0); // force buffer count to 0
2254 } else {
2255 LOGW("getAnalysisInfo failed, ret = %d", ret);
2256 }
2257 if (!mAnalysisChannel) {
2258 LOGW("Analysis channel cannot be created");
2259 }
2260 }
2261
Thierry Strudel3d639192016-09-09 11:52:26 -07002262 //RAW DUMP channel
2263 if (mEnableRawDump && isRawStreamRequested == false){
2264 cam_dimension_t rawDumpSize;
2265 rawDumpSize = getMaxRawSize(mCameraId);
2266 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2267 setPAAFSupport(rawDumpFeatureMask,
2268 CAM_STREAM_TYPE_RAW,
2269 gCamCapability[mCameraId]->color_arrangement);
2270 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2271 mChannelHandle,
2272 mCameraHandle->ops,
2273 rawDumpSize,
2274 &padding_info,
2275 this, rawDumpFeatureMask);
2276 if (!mRawDumpChannel) {
2277 LOGE("Raw Dump channel cannot be created");
2278 pthread_mutex_unlock(&mMutex);
2279 return -ENOMEM;
2280 }
2281 }
2282
2283
2284 if (mAnalysisChannel) {
2285 cam_analysis_info_t analysisInfo;
2286 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2287 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2288 CAM_STREAM_TYPE_ANALYSIS;
2289 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2290 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2291 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2292 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2293 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002294 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002295 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2296 &analysisInfo);
2297 if (rc != NO_ERROR) {
2298 LOGE("getAnalysisInfo failed, ret = %d", rc);
2299 pthread_mutex_unlock(&mMutex);
2300 return rc;
2301 }
2302 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002303 mCommon.getMatchingDimension(previewSize,
2304 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 mStreamConfigInfo.num_streams++;
2306 }
2307
2308 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2309 cam_analysis_info_t supportInfo;
2310 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2311 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2312 setPAAFSupport(callbackFeatureMask,
2313 CAM_STREAM_TYPE_CALLBACK,
2314 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002315 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002316 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002317 if (ret != NO_ERROR) {
2318 /* Ignore the error for Mono camera
2319 * because the PAAF bit mask is only set
2320 * for CAM_STREAM_TYPE_ANALYSIS stream type
2321 */
2322 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2323 LOGW("getAnalysisInfo failed, ret = %d", ret);
2324 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002325 }
2326 mSupportChannel = new QCamera3SupportChannel(
2327 mCameraHandle->camera_handle,
2328 mChannelHandle,
2329 mCameraHandle->ops,
2330 &gCamCapability[mCameraId]->padding_info,
2331 callbackFeatureMask,
2332 CAM_STREAM_TYPE_CALLBACK,
2333 &QCamera3SupportChannel::kDim,
2334 CAM_FORMAT_YUV_420_NV21,
2335 supportInfo.hw_analysis_supported,
2336 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002337 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002338 if (!mSupportChannel) {
2339 LOGE("dummy channel cannot be created");
2340 pthread_mutex_unlock(&mMutex);
2341 return -ENOMEM;
2342 }
2343 }
2344
2345 if (mSupportChannel) {
2346 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2347 QCamera3SupportChannel::kDim;
2348 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2349 CAM_STREAM_TYPE_CALLBACK;
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2351 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2352 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2353 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2354 gCamCapability[mCameraId]->color_arrangement);
2355 mStreamConfigInfo.num_streams++;
2356 }
2357
2358 if (mRawDumpChannel) {
2359 cam_dimension_t rawSize;
2360 rawSize = getMaxRawSize(mCameraId);
2361 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2362 rawSize;
2363 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2364 CAM_STREAM_TYPE_RAW;
2365 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2366 CAM_QCOM_FEATURE_NONE;
2367 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2368 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2369 gCamCapability[mCameraId]->color_arrangement);
2370 mStreamConfigInfo.num_streams++;
2371 }
2372 /* In HFR mode, if video stream is not added, create a dummy channel so that
2373 * ISP can create a batch mode even for preview only case. This channel is
2374 * never 'start'ed (no stream-on), it is only 'initialized' */
2375 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2376 !m_bIsVideo) {
2377 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2378 setPAAFSupport(dummyFeatureMask,
2379 CAM_STREAM_TYPE_VIDEO,
2380 gCamCapability[mCameraId]->color_arrangement);
2381 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2382 mChannelHandle,
2383 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002384 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002385 this,
2386 &mDummyBatchStream,
2387 CAM_STREAM_TYPE_VIDEO,
2388 dummyFeatureMask,
2389 mMetadataChannel);
2390 if (NULL == mDummyBatchChannel) {
2391 LOGE("creation of mDummyBatchChannel failed."
2392 "Preview will use non-hfr sensor mode ");
2393 }
2394 }
2395 if (mDummyBatchChannel) {
2396 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2397 mDummyBatchStream.width;
2398 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2399 mDummyBatchStream.height;
2400 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2401 CAM_STREAM_TYPE_VIDEO;
2402 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2403 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2404 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2406 gCamCapability[mCameraId]->color_arrangement);
2407 mStreamConfigInfo.num_streams++;
2408 }
2409
2410 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2411 mStreamConfigInfo.buffer_info.max_buffers =
2412 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2413
2414 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2415 for (pendingRequestIterator i = mPendingRequestsList.begin();
2416 i != mPendingRequestsList.end();) {
2417 i = erasePendingRequest(i);
2418 }
2419 mPendingFrameDropList.clear();
2420 // Initialize/Reset the pending buffers list
2421 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2422 req.mPendingBufferList.clear();
2423 }
2424 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2425
2426 mPendingReprocessResultList.clear();
2427
2428 mCurJpegMeta.clear();
2429 //Get min frame duration for this streams configuration
2430 deriveMinFrameDuration();
2431
2432 // Update state
2433 mState = CONFIGURED;
2434
2435 pthread_mutex_unlock(&mMutex);
2436
2437 return rc;
2438}
2439
2440/*===========================================================================
2441 * FUNCTION : validateCaptureRequest
2442 *
2443 * DESCRIPTION: validate a capture request from camera service
2444 *
2445 * PARAMETERS :
2446 * @request : request from framework to process
2447 *
2448 * RETURN :
2449 *
2450 *==========================================================================*/
2451int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002452 camera3_capture_request_t *request,
2453 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002454{
2455 ssize_t idx = 0;
2456 const camera3_stream_buffer_t *b;
2457 CameraMetadata meta;
2458
2459 /* Sanity check the request */
2460 if (request == NULL) {
2461 LOGE("NULL capture request");
2462 return BAD_VALUE;
2463 }
2464
2465 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2466 /*settings cannot be null for the first request*/
2467 return BAD_VALUE;
2468 }
2469
2470 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2472 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002473 LOGE("Request %d: No output buffers provided!",
2474 __FUNCTION__, frameNumber);
2475 return BAD_VALUE;
2476 }
2477 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2478 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2479 request->num_output_buffers, MAX_NUM_STREAMS);
2480 return BAD_VALUE;
2481 }
2482 if (request->input_buffer != NULL) {
2483 b = request->input_buffer;
2484 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2485 LOGE("Request %d: Buffer %ld: Status not OK!",
2486 frameNumber, (long)idx);
2487 return BAD_VALUE;
2488 }
2489 if (b->release_fence != -1) {
2490 LOGE("Request %d: Buffer %ld: Has a release fence!",
2491 frameNumber, (long)idx);
2492 return BAD_VALUE;
2493 }
2494 if (b->buffer == NULL) {
2495 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2496 frameNumber, (long)idx);
2497 return BAD_VALUE;
2498 }
2499 }
2500
2501 // Validate all buffers
2502 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002503 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002504 QCamera3ProcessingChannel *channel =
2505 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2506 if (channel == NULL) {
2507 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2508 frameNumber, (long)idx);
2509 return BAD_VALUE;
2510 }
2511 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2512 LOGE("Request %d: Buffer %ld: Status not OK!",
2513 frameNumber, (long)idx);
2514 return BAD_VALUE;
2515 }
2516 if (b->release_fence != -1) {
2517 LOGE("Request %d: Buffer %ld: Has a release fence!",
2518 frameNumber, (long)idx);
2519 return BAD_VALUE;
2520 }
2521 if (b->buffer == NULL) {
2522 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2523 frameNumber, (long)idx);
2524 return BAD_VALUE;
2525 }
2526 if (*(b->buffer) == NULL) {
2527 LOGE("Request %d: Buffer %ld: NULL private handle!",
2528 frameNumber, (long)idx);
2529 return BAD_VALUE;
2530 }
2531 idx++;
2532 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002533 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002534 return NO_ERROR;
2535}
2536
2537/*===========================================================================
2538 * FUNCTION : deriveMinFrameDuration
2539 *
 2540 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2541 * on currently configured streams.
2542 *
2543 * PARAMETERS : NONE
2544 *
2545 * RETURN : NONE
2546 *
2547 *==========================================================================*/
2548void QCamera3HardwareInterface::deriveMinFrameDuration()
2549{
2550 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2551
2552 maxJpegDim = 0;
2553 maxProcessedDim = 0;
2554 maxRawDim = 0;
2555
2556 // Figure out maximum jpeg, processed, and raw dimensions
2557 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2558 it != mStreamInfo.end(); it++) {
2559
2560 // Input stream doesn't have valid stream_type
2561 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2562 continue;
2563
2564 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2565 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2566 if (dimension > maxJpegDim)
2567 maxJpegDim = dimension;
2568 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2569 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2570 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2571 if (dimension > maxRawDim)
2572 maxRawDim = dimension;
2573 } else {
2574 if (dimension > maxProcessedDim)
2575 maxProcessedDim = dimension;
2576 }
2577 }
2578
2579 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2580 MAX_SIZES_CNT);
2581
2582 //Assume all jpeg dimensions are in processed dimensions.
2583 if (maxJpegDim > maxProcessedDim)
2584 maxProcessedDim = maxJpegDim;
2585 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2586 if (maxProcessedDim > maxRawDim) {
2587 maxRawDim = INT32_MAX;
2588
2589 for (size_t i = 0; i < count; i++) {
2590 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2591 gCamCapability[mCameraId]->raw_dim[i].height;
2592 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2593 maxRawDim = dimension;
2594 }
2595 }
2596
2597 //Find minimum durations for processed, jpeg, and raw
2598 for (size_t i = 0; i < count; i++) {
2599 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2600 gCamCapability[mCameraId]->raw_dim[i].height) {
2601 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2602 break;
2603 }
2604 }
2605 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2606 for (size_t i = 0; i < count; i++) {
2607 if (maxProcessedDim ==
2608 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2609 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2610 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2611 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2612 break;
2613 }
2614 }
2615}
2616
2617/*===========================================================================
2618 * FUNCTION : getMinFrameDuration
2619 *
 2620 * DESCRIPTION: get minimum frame duration based on the currently configured
 2621 * minimum frame durations and the current request configuration.
 2622 *
 2623 * PARAMETERS : @request: request sent by the frameworks
 2624 *
 2625 * RETURN : minimum frame duration for a particular request
2626 *
2627 *==========================================================================*/
2628int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2629{
2630 bool hasJpegStream = false;
2631 bool hasRawStream = false;
2632 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2633 const camera3_stream_t *stream = request->output_buffers[i].stream;
2634 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2635 hasJpegStream = true;
2636 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2637 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2638 stream->format == HAL_PIXEL_FORMAT_RAW16)
2639 hasRawStream = true;
2640 }
2641
2642 if (!hasJpegStream)
2643 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2644 else
2645 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2646}
2647
2648/*===========================================================================
2649 * FUNCTION : handleBuffersDuringFlushLock
2650 *
2651 * DESCRIPTION: Account for buffers returned from back-end during flush
2652 * This function is executed while mMutex is held by the caller.
2653 *
2654 * PARAMETERS :
2655 * @buffer: image buffer for the callback
2656 *
2657 * RETURN :
2658 *==========================================================================*/
2659void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2660{
2661 bool buffer_found = false;
2662 for (List<PendingBuffersInRequest>::iterator req =
2663 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2664 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2665 for (List<PendingBufferInfo>::iterator i =
2666 req->mPendingBufferList.begin();
2667 i != req->mPendingBufferList.end(); i++) {
2668 if (i->buffer == buffer->buffer) {
2669 mPendingBuffersMap.numPendingBufsAtFlush--;
2670 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2671 buffer->buffer, req->frame_number,
2672 mPendingBuffersMap.numPendingBufsAtFlush);
2673 buffer_found = true;
2674 break;
2675 }
2676 }
2677 if (buffer_found) {
2678 break;
2679 }
2680 }
2681 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2682 //signal the flush()
2683 LOGD("All buffers returned to HAL. Continue flush");
2684 pthread_cond_signal(&mBuffersCond);
2685 }
2686}
2687
2688
2689/*===========================================================================
2690 * FUNCTION : handlePendingReprocResults
2691 *
2692 * DESCRIPTION: check and notify on any pending reprocess results
2693 *
2694 * PARAMETERS :
2695 * @frame_number : Pending request frame number
2696 *
2697 * RETURN : int32_t type of status
2698 * NO_ERROR -- success
2699 * none-zero failure code
2700 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Look for a held-back reprocess result matching frame_number; if found,
    // deliver its notify message and capture result, then drop both the
    // pending-request entry and the pending-reprocess entry.
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Fire the notify that was delayed until now.
            orchestrateNotify(&j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Find the matching entry in the pending request list so the
            // result can carry its input buffer and settings.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    // NOTE: points into the list node *j — only valid because
                    // the node is erased after orchestrateResult() returns.
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    orchestrateResult(&result);

                    // Erase invalidates k; we break out immediately.
                    erasePendingRequest(k);
                    break;
                }
            }
            // Safe to drop the reprocess entry now that the result (which
            // referenced j->buffer) has been dispatched.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2739
2740/*===========================================================================
2741 * FUNCTION : handleBatchMetadata
2742 *
2743 * DESCRIPTION: Handles metadata buffer callback in batch mode
2744 *
2745 * PARAMETERS : @metadata_buf: metadata buffer
2746 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2747 * the meta buf in this method
2748 *
2749 * RETURN :
2750 *
2751 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8. So one batch expands into per-frame results:
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Extract the batch's trailing frame numbers / timestamp from metadata.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Still loop below once so pipeline-depth bookkeeping happens.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first one.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        // Map the batch's last frame number back to the first, then retire
        // the batch entry — no more metadata expected for it.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One iteration per frame in the batch (the larger of the two spans).
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                     urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                     frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame; patch the shared metadata buffer in place for each
             * interpolated frame before handing it down. */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                             urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                // Infer per-frame timestamp by spacing frames evenly at the
                // HFR video frame rate, ending at the batch's timestamp.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        // Deliver one per-frame result; the buffer is not freed here
        // (free_and_bufdone handled once for the whole batch, below).
        pthread_mutex_lock(&mMutex);
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2916
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002917void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2918 camera3_error_msg_code_t errorCode)
2919{
2920 camera3_notify_msg_t notify_msg;
2921 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2922 notify_msg.type = CAMERA3_MSG_ERROR;
2923 notify_msg.message.error.error_code = errorCode;
2924 notify_msg.message.error.error_stream = NULL;
2925 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002926 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002927
2928 return;
2929}
Thierry Strudel3d639192016-09-09 11:52:26 -07002930/*===========================================================================
2931 * FUNCTION : handleMetadataWithLock
2932 *
2933 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2934 *
2935 * PARAMETERS : @metadata_buf: metadata buffer
2936 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2937 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002938 * @firstMetadataInBatch: Boolean to indicate whether this is the
2939 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002940 *
2941 * RETURN :
2942 *
2943 *==========================================================================*/
2944void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002945 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2946 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002947{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002948 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2950 //during flush do not send metadata from this thread
2951 LOGD("not sending metadata during flush or when mState is error");
2952 if (free_and_bufdone_meta_buf) {
2953 mMetadataChannel->bufDone(metadata_buf);
2954 free(metadata_buf);
2955 }
2956 return;
2957 }
2958
2959 //not in flush
2960 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2961 int32_t frame_number_valid, urgent_frame_number_valid;
2962 uint32_t frame_number, urgent_frame_number;
2963 int64_t capture_time;
2964 nsecs_t currentSysTime;
2965
2966 int32_t *p_frame_number_valid =
2967 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2968 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2969 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2970 int32_t *p_urgent_frame_number_valid =
2971 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2972 uint32_t *p_urgent_frame_number =
2973 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2974 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
2975 metadata) {
2976 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
2977 *p_frame_number_valid, *p_frame_number);
2978 }
2979
2980 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
2981 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
2982 LOGE("Invalid metadata");
2983 if (free_and_bufdone_meta_buf) {
2984 mMetadataChannel->bufDone(metadata_buf);
2985 free(metadata_buf);
2986 }
2987 goto done_metadata;
2988 }
2989 frame_number_valid = *p_frame_number_valid;
2990 frame_number = *p_frame_number;
2991 capture_time = *p_capture_time;
2992 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2993 urgent_frame_number = *p_urgent_frame_number;
2994 currentSysTime = systemTime(CLOCK_MONOTONIC);
2995
2996 // Detect if buffers from any requests are overdue
2997 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2998 if ( (currentSysTime - req.timestamp) >
2999 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
3000 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003001 assert(missed.stream->priv);
3002 if (missed.stream->priv) {
3003 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3004 assert(ch->mStreams[0]);
3005 if (ch->mStreams[0]) {
3006 LOGE("Cancel missing frame = %d, buffer = %p,"
3007 "stream type = %d, stream format = %d",
3008 req.frame_number, missed.buffer,
3009 ch->mStreams[0]->getMyType(), missed.stream->format);
3010 ch->timeoutFrame(req.frame_number);
3011 }
3012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003013 }
3014 }
3015 }
3016 //Partial result on process_capture_result for timestamp
3017 if (urgent_frame_number_valid) {
3018 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3019 urgent_frame_number, capture_time);
3020
3021 //Recieved an urgent Frame Number, handle it
3022 //using partial results
3023 for (pendingRequestIterator i =
3024 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3025 LOGD("Iterator Frame = %d urgent frame = %d",
3026 i->frame_number, urgent_frame_number);
3027
3028 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3029 (i->partial_result_cnt == 0)) {
3030 LOGE("Error: HAL missed urgent metadata for frame number %d",
3031 i->frame_number);
3032 }
3033
3034 if (i->frame_number == urgent_frame_number &&
3035 i->bUrgentReceived == 0) {
3036
3037 camera3_capture_result_t result;
3038 memset(&result, 0, sizeof(camera3_capture_result_t));
3039
3040 i->partial_result_cnt++;
3041 i->bUrgentReceived = 1;
3042 // Extract 3A metadata
3043 result.result =
3044 translateCbUrgentMetadataToResultMetadata(metadata);
3045 // Populate metadata result
3046 result.frame_number = urgent_frame_number;
3047 result.num_output_buffers = 0;
3048 result.output_buffers = NULL;
3049 result.partial_result = i->partial_result_cnt;
3050
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003051 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003052 LOGD("urgent frame_number = %u, capture_time = %lld",
3053 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003054 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3055 // Instant AEC settled for this frame.
3056 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3057 mInstantAECSettledFrameNumber = urgent_frame_number;
3058 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003059 free_camera_metadata((camera_metadata_t *)result.result);
3060 break;
3061 }
3062 }
3063 }
3064
3065 if (!frame_number_valid) {
3066 LOGD("Not a valid normal frame number, used as SOF only");
3067 if (free_and_bufdone_meta_buf) {
3068 mMetadataChannel->bufDone(metadata_buf);
3069 free(metadata_buf);
3070 }
3071 goto done_metadata;
3072 }
3073 LOGH("valid frame_number = %u, capture_time = %lld",
3074 frame_number, capture_time);
3075
3076 for (pendingRequestIterator i = mPendingRequestsList.begin();
3077 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3078 // Flush out all entries with less or equal frame numbers.
3079
3080 camera3_capture_result_t result;
3081 memset(&result, 0, sizeof(camera3_capture_result_t));
3082
3083 LOGD("frame_number in the list is %u", i->frame_number);
3084 i->partial_result_cnt++;
3085 result.partial_result = i->partial_result_cnt;
3086
3087 // Check whether any stream buffer corresponding to this is dropped or not
3088 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003089 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3090 if (p_cam_frame_drop ||
3091 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003092 /* Clear notify_msg structure */
3093 camera3_notify_msg_t notify_msg;
3094 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3095 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3096 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003097 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003098 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3099 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003100 if (p_cam_frame_drop) {
3101 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003102 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003103 // Got the stream ID for drop frame.
3104 dropFrame = true;
3105 break;
3106 }
3107 }
3108 } else {
3109 // This is instant AEC case.
3110 // For instant AEC drop the stream untill AEC is settled.
3111 dropFrame = true;
3112 }
3113 if (dropFrame) {
3114 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3115 if (p_cam_frame_drop) {
3116 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003117 LOGE("Start of reporting error frame#=%u, streamID=%u",
3118 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003119 } else {
3120 // For instant AEC, inform frame drop and frame number
3121 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3122 "AEC settled frame number = %u",
3123 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3124 }
3125 notify_msg.type = CAMERA3_MSG_ERROR;
3126 notify_msg.message.error.frame_number = i->frame_number;
3127 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3128 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003129 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003130 if (p_cam_frame_drop) {
3131 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003132 LOGE("End of reporting error frame#=%u, streamID=%u",
3133 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003134 } else {
3135 // For instant AEC, inform frame drop and frame number
3136 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3137 "AEC settled frame number = %u",
3138 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3139 }
3140 PendingFrameDropInfo PendingFrameDrop;
3141 PendingFrameDrop.frame_number=i->frame_number;
3142 PendingFrameDrop.stream_ID = streamID;
3143 // Add the Frame drop info to mPendingFrameDropList
3144 mPendingFrameDropList.push_back(PendingFrameDrop);
3145 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003146 }
3147 }
3148
3149 // Send empty metadata with already filled buffers for dropped metadata
3150 // and send valid metadata with already filled buffers for current metadata
3151 /* we could hit this case when we either
3152 * 1. have a pending reprocess request or
3153 * 2. miss a metadata buffer callback */
3154 if (i->frame_number < frame_number) {
3155 if (i->input_buffer) {
3156 /* this will be handled in handleInputBufferWithLock */
3157 i++;
3158 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003159 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003160
3161 mPendingLiveRequest--;
3162
3163 CameraMetadata dummyMetadata;
3164 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3165 result.result = dummyMetadata.release();
3166
3167 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003168 }
3169 } else {
3170 mPendingLiveRequest--;
3171 /* Clear notify_msg structure */
3172 camera3_notify_msg_t notify_msg;
3173 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3174
3175 // Send shutter notify to frameworks
3176 notify_msg.type = CAMERA3_MSG_SHUTTER;
3177 notify_msg.message.shutter.frame_number = i->frame_number;
3178 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003179 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003180
3181 i->timestamp = capture_time;
3182
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003183 /* Set the timestamp in display metadata so that clients aware of
3184 private_handle such as VT can use this un-modified timestamps.
3185 Camera framework is unaware of this timestamp and cannot change this */
3186 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3187
Thierry Strudel3d639192016-09-09 11:52:26 -07003188 // Find channel requiring metadata, meaning internal offline postprocess
3189 // is needed.
3190 //TODO: for now, we don't support two streams requiring metadata at the same time.
3191 // (because we are not making copies, and metadata buffer is not reference counted.
3192 bool internalPproc = false;
3193 for (pendingBufferIterator iter = i->buffers.begin();
3194 iter != i->buffers.end(); iter++) {
3195 if (iter->need_metadata) {
3196 internalPproc = true;
3197 QCamera3ProcessingChannel *channel =
3198 (QCamera3ProcessingChannel *)iter->stream->priv;
3199 channel->queueReprocMetadata(metadata_buf);
3200 break;
3201 }
3202 }
3203
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003204 for (auto itr = i->internalRequestList.begin();
3205 itr != i->internalRequestList.end(); itr++) {
3206 if (itr->need_metadata) {
3207 internalPproc = true;
3208 QCamera3ProcessingChannel *channel =
3209 (QCamera3ProcessingChannel *)itr->stream->priv;
3210 channel->queueReprocMetadata(metadata_buf);
3211 break;
3212 }
3213 }
3214
3215
Thierry Strudel3d639192016-09-09 11:52:26 -07003216 result.result = translateFromHalMetadata(metadata,
3217 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Samuel Ha68ba5172016-12-15 18:41:12 -08003218 i->capture_intent,
3219 /* DevCamDebug metadata translateFromHalMetadata function call*/
3220 i->DevCamDebug_meta_enable,
3221 /* DevCamDebug metadata end */
3222 internalPproc, i->fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003223 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003224
3225 saveExifParams(metadata);
3226
3227 if (i->blob_request) {
3228 {
3229 //Dump tuning metadata if enabled and available
3230 char prop[PROPERTY_VALUE_MAX];
3231 memset(prop, 0, sizeof(prop));
3232 property_get("persist.camera.dumpmetadata", prop, "0");
3233 int32_t enabled = atoi(prop);
3234 if (enabled && metadata->is_tuning_params_valid) {
3235 dumpMetadataToFile(metadata->tuning_params,
3236 mMetaFrameCount,
3237 enabled,
3238 "Snapshot",
3239 frame_number);
3240 }
3241 }
3242 }
3243
3244 if (!internalPproc) {
3245 LOGD("couldn't find need_metadata for this metadata");
3246 // Return metadata buffer
3247 if (free_and_bufdone_meta_buf) {
3248 mMetadataChannel->bufDone(metadata_buf);
3249 free(metadata_buf);
3250 }
3251 }
3252 }
3253 if (!result.result) {
3254 LOGE("metadata is NULL");
3255 }
3256 result.frame_number = i->frame_number;
3257 result.input_buffer = i->input_buffer;
3258 result.num_output_buffers = 0;
3259 result.output_buffers = NULL;
3260 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3261 j != i->buffers.end(); j++) {
3262 if (j->buffer) {
3263 result.num_output_buffers++;
3264 }
3265 }
3266
3267 updateFpsInPreviewBuffer(metadata, i->frame_number);
3268
3269 if (result.num_output_buffers > 0) {
3270 camera3_stream_buffer_t *result_buffers =
3271 new camera3_stream_buffer_t[result.num_output_buffers];
3272 if (result_buffers != NULL) {
3273 size_t result_buffers_idx = 0;
3274 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3275 j != i->buffers.end(); j++) {
3276 if (j->buffer) {
3277 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3278 m != mPendingFrameDropList.end(); m++) {
3279 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3280 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3281 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3282 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3283 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3284 frame_number, streamID);
3285 m = mPendingFrameDropList.erase(m);
3286 break;
3287 }
3288 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003289 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003290 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3291 result_buffers[result_buffers_idx++] = *(j->buffer);
3292 free(j->buffer);
3293 j->buffer = NULL;
3294 }
3295 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003296
Thierry Strudel3d639192016-09-09 11:52:26 -07003297 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003298 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003299 LOGD("meta frame_number = %u, capture_time = %lld",
3300 result.frame_number, i->timestamp);
3301 free_camera_metadata((camera_metadata_t *)result.result);
3302 delete[] result_buffers;
3303 }else {
3304 LOGE("Fatal error: out of memory");
3305 }
3306 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003307 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003308 LOGD("meta frame_number = %u, capture_time = %lld",
3309 result.frame_number, i->timestamp);
3310 free_camera_metadata((camera_metadata_t *)result.result);
3311 }
3312
3313 i = erasePendingRequest(i);
3314
3315 if (!mPendingReprocessResultList.empty()) {
3316 handlePendingReprocResults(frame_number + 1);
3317 }
3318 }
3319
3320done_metadata:
3321 for (pendingRequestIterator i = mPendingRequestsList.begin();
3322 i != mPendingRequestsList.end() ;i++) {
3323 i->pipeline_depth++;
3324 }
3325 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3326 unblockRequestIfNecessary();
3327}
3328
3329/*===========================================================================
3330 * FUNCTION : hdrPlusPerfLock
3331 *
3332 * DESCRIPTION: perf lock for HDR+ using custom intent
3333 *
3334 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3335 *
3336 * RETURN : None
3337 *
3338 *==========================================================================*/
3339void QCamera3HardwareInterface::hdrPlusPerfLock(
3340 mm_camera_super_buf_t *metadata_buf)
3341{
3342 if (NULL == metadata_buf) {
3343 LOGE("metadata_buf is NULL");
3344 return;
3345 }
3346 metadata_buffer_t *metadata =
3347 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3348 int32_t *p_frame_number_valid =
3349 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3350 uint32_t *p_frame_number =
3351 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3352
3353 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3354 LOGE("%s: Invalid metadata", __func__);
3355 return;
3356 }
3357
3358 //acquire perf lock for 5 sec after the last HDR frame is captured
3359 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3360 if ((p_frame_number != NULL) &&
3361 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003362 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003363 }
3364 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003365}
3366
3367/*===========================================================================
3368 * FUNCTION : handleInputBufferWithLock
3369 *
3370 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3371 *
3372 * PARAMETERS : @frame_number: frame number of the input buffer
3373 *
3374 * RETURN :
3375 *
3376 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Locate the pending (reprocess) request matching this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        // Send the shutter notification exactly once per request. The
        // timestamp comes from ANDROID_SENSOR_TIMESTAMP in the request's
        // settings when present; otherwise fall back to the current
        // monotonic time.
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on (and close) the input buffer's release fence before
        // returning the result, so the producer has finished with it.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Return the result for the input request: settings echoed back,
        // the input buffer attached, no output buffers, and all partial
        // results declared complete.
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // The request is fully satisfied; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3434
3435/*===========================================================================
3436 * FUNCTION : handleBufferWithLock
3437 *
3438 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3439 *
3440 * PARAMETERS : @buffer: image buffer for the callback
3441 * @frame_number: frame number of the image buffer
3442 *
3443 * RETURN :
3444 *
3445 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A BLOB (JPEG) buffer completing means the snapshot is done; release
    // the perf lock taken for it.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                        j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata, a single output buffer, and a
        // partial_result of 0 (metadata was already delivered separately).
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was marked for a frame drop, flag the
        // buffer with STATUS_ERROR and retire the drop-list entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                        frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge in any error status recorded against this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: build the shutter notification from the
            // input settings' sensor timestamp (falling back to now), wait
            // on the input release fence, then send notify + full result.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
            mPendingBuffersMap.removeBuf(buffer->buffer);

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = PARTIAL_RESULT_COUNT;

            orchestrateNotify(&notify_msg);
            orchestrateResult(&result);
            LOGD("Notify reprocess now %d!", frame_number);
            i = erasePendingRequest(i);
        } else {
            // Live request still pending (metadata not yet arrived): cache a
            // copy of the buffer against its stream entry; it is returned
            // later together with the metadata.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                            buffer->buffer, frame_number);
                    }
                }
            }
        }
    }

    // First preview buffer out: drop the startup perf locks and switch to
    // the encode power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3580
3581/*===========================================================================
3582 * FUNCTION : unblockRequestIfNecessary
3583 *
3584 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3585 * that mMutex is held when this function is called.
3586 *
3587 * PARAMETERS :
3588 *
3589 * RETURN :
3590 *
3591 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request
    // Caller holds mMutex (see function header), so only the condition
    // variable is signalled here; the waiter re-checks its predicate.
    pthread_cond_signal(&mRequestCond);
}
3597
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003598/*===========================================================================
3599 * FUNCTION : isHdrSnapshotRequest
3600 *
3601 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3602 *
3603 * PARAMETERS : camera3 request structure
3604 *
3605 * RETURN : boolean decision variable
3606 *
3607 *==========================================================================*/
3608bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3609{
3610 if (request == NULL) {
3611 LOGE("Invalid request handle");
3612 assert(0);
3613 return false;
3614 }
3615
3616 if (!mForceHdrSnapshot) {
3617 CameraMetadata frame_settings;
3618 frame_settings = request->settings;
3619
3620 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3621 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3622 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3623 return false;
3624 }
3625 } else {
3626 return false;
3627 }
3628
3629 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3630 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3631 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3632 return false;
3633 }
3634 } else {
3635 return false;
3636 }
3637 }
3638
3639 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3640 if (request->output_buffers[i].stream->format
3641 == HAL_PIXEL_FORMAT_BLOB) {
3642 return true;
3643 }
3644 }
3645
3646 return false;
3647}
3648/*===========================================================================
3649 * FUNCTION : orchestrateRequest
3650 *
3651 * DESCRIPTION: Orchestrates a capture request from camera service
3652 *
3653 * PARAMETERS :
3654 * @request : request from framework to process
3655 *
3656 * RETURN : Error status codes
3657 *
3658 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{
    // Save the framework's view of the request so it can be restored after
    // the internally generated HDR bracketing sequence mutates it.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        // HDR snapshot: expand the single framework request into a bracketed
        // sequence of internal requests (-2x / 0x / +2x EV), capturing the
        // actual framework result at 0x and metering-only frames around it.
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr = internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        // First bracket: -2x EV (half-step constant) with AE locked so the
        // exposure does not drift between bracketed frames.
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        // NOTE(review): modified_meta.release() transfers ownership of the
        // buffer to modified_settings; it is re-adopted below but never
        // freed before the final restore — looks like a metadata leak,
        // confirm against CameraMetadata ownership semantics.
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Internal frame numbers are generated/mapped via _orchestrationDb;
        // metering-only frames map to EMPTY_FRAMEWORK_FRAME_NUMBER and are
        // dropped in orchestrateResult/orchestrateNotify.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // The framework-visible capture: restore the real output buffers and
        // map the internal number back to the original framework number.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Second bracket: 0x EV, AE still locked.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // Settling frame: metering only, no reprocess metadata needed.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Actual 0x capture frame: metadata needed for offline postprocess.
        // NOTE(review): ALOGE here vs LOGE above — inconsistent logging
        // macros for the same error message.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        // Third bracket: +2x EV, AE still locked.
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Settling frame at +2x: metering only.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Actual +2x capture frame: metadata needed.
        itr = internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        // Normal (non-HDR) path: just map the framework frame number to a
        // fresh internal one and forward the request unchanged.
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3794
3795/*===========================================================================
3796 * FUNCTION : orchestrateResult
3797 *
3798 * DESCRIPTION: Orchestrates a capture result to camera service
3799 *
3800 * PARAMETERS :
3801 * @request : request from framework to process
3802 *
3803 * RETURN :
3804 *
3805 *==========================================================================*/
3806void QCamera3HardwareInterface::orchestrateResult(
3807 camera3_capture_result_t *result)
3808{
3809 uint32_t frameworkFrameNumber;
3810 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3811 frameworkFrameNumber);
3812 if (rc != NO_ERROR) {
3813 LOGE("Cannot find translated frameworkFrameNumber");
3814 assert(0);
3815 } else {
3816 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3817 LOGD("CAM_DEBUG Internal Request drop the result");
3818 } else {
3819 result->frame_number = frameworkFrameNumber;
3820 mCallbackOps->process_capture_result(mCallbackOps, result);
3821 }
3822 }
3823}
3824
3825/*===========================================================================
3826 * FUNCTION : orchestrateNotify
3827 *
3828 * DESCRIPTION: Orchestrates a notify to camera service
3829 *
3830 * PARAMETERS :
3831 * @request : request from framework to process
3832 *
3833 * RETURN :
3834 *
3835 *==========================================================================*/
3836void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3837{
3838 uint32_t frameworkFrameNumber;
3839 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3840 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3841 frameworkFrameNumber);
3842 if (rc != NO_ERROR) {
3843 LOGE("Cannot find translated frameworkFrameNumber");
3844 assert(0);
3845 } else {
3846 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3847 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3848 } else {
3849 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3850 mCallbackOps->notify(mCallbackOps, notify_msg);
3851 }
3852 }
3853}
3854
3855/*===========================================================================
3856 * FUNCTION : FrameNumberRegistry
3857 *
3858 * DESCRIPTION: Constructor
3859 *
3860 * PARAMETERS :
3861 *
3862 * RETURN :
3863 *
3864 *==========================================================================*/
3865FrameNumberRegistry::FrameNumberRegistry()
3866{
3867 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3868}
3869
3870/*===========================================================================
3871 * FUNCTION : ~FrameNumberRegistry
3872 *
3873 * DESCRIPTION: Destructor
3874 *
3875 * PARAMETERS :
3876 *
3877 * RETURN :
3878 *
3879 *==========================================================================*/
3880FrameNumberRegistry::~FrameNumberRegistry()
3881{
3882}
3883
3884/*===========================================================================
3885 * FUNCTION : PurgeOldEntriesLocked
3886 *
 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
3888 *
3889 * PARAMETERS :
3890 *
3891 * RETURN : NONE
3892 *
3893 *==========================================================================*/
3894void FrameNumberRegistry::purgeOldEntriesLocked()
3895{
3896 while (_register.begin() != _register.end()) {
3897 auto itr = _register.begin();
3898 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3899 _register.erase(itr);
3900 } else {
3901 return;
3902 }
3903 }
3904}
3905
3906/*===========================================================================
3907 * FUNCTION : allocStoreInternalFrameNumber
3908 *
3909 * DESCRIPTION: Method to note down a framework request and associate a new
3910 * internal request number against it
3911 *
3912 * PARAMETERS :
3913 * @fFrameNumber: Identifier given by framework
3914 * @internalFN : Output parameter which will have the newly generated internal
3915 * entry
3916 *
3917 * RETURN : Error code
3918 *
3919 *==========================================================================*/
3920int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
3921 uint32_t &internalFrameNumber)
3922{
3923 Mutex::Autolock lock(mRegistryLock);
3924 internalFrameNumber = _nextFreeInternalNumber++;
3925 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
3926 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
3927 purgeOldEntriesLocked();
3928 return NO_ERROR;
3929}
3930
3931/*===========================================================================
3932 * FUNCTION : generateStoreInternalFrameNumber
3933 *
3934 * DESCRIPTION: Method to associate a new internal request number independent
 * of any association with framework requests
3936 *
3937 * PARAMETERS :
3938 * @internalFrame#: Output parameter which will have the newly generated internal
3939 *
3940 *
3941 * RETURN : Error code
3942 *
3943 *==========================================================================*/
3944int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
3945{
3946 Mutex::Autolock lock(mRegistryLock);
3947 internalFrameNumber = _nextFreeInternalNumber++;
3948 LOGD("Generated internal framenumber:%d", internalFrameNumber);
3949 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
3950 purgeOldEntriesLocked();
3951 return NO_ERROR;
3952}
3953
3954/*===========================================================================
3955 * FUNCTION : getFrameworkFrameNumber
3956 *
3957 * DESCRIPTION: Method to query the framework framenumber given an internal #
3958 *
3959 * PARAMETERS :
3960 * @internalFrame#: Internal reference
3961 * @frameworkframenumber: Output parameter holding framework frame entry
3962 *
3963 * RETURN : Error code
3964 *
3965 *==========================================================================*/
3966int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
3967 uint32_t &frameworkFrameNumber)
3968{
3969 Mutex::Autolock lock(mRegistryLock);
3970 auto itr = _register.find(internalFrameNumber);
3971 if (itr == _register.end()) {
3972 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
3973 return -ENOENT;
3974 }
3975
3976 frameworkFrameNumber = itr->second;
3977 purgeOldEntriesLocked();
3978 return NO_ERROR;
3979}
Thierry Strudel3d639192016-09-09 11:52:26 -07003980
3981/*===========================================================================
3982 * FUNCTION : processCaptureRequest
3983 *
3984 * DESCRIPTION: process a capture request from camera service
3985 *
3986 * PARAMETERS :
3987 * @request : request from framework to process
3988 *
3989 * RETURN :
3990 *
3991 *==========================================================================*/
3992int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003993 camera3_capture_request_t *request,
3994 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003995{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003996 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07003997 int rc = NO_ERROR;
3998 int32_t request_id;
3999 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004000 bool isVidBufRequested = false;
4001 camera3_stream_buffer_t *pInputBuffer = NULL;
4002
4003 pthread_mutex_lock(&mMutex);
4004
4005 // Validate current state
4006 switch (mState) {
4007 case CONFIGURED:
4008 case STARTED:
4009 /* valid state */
4010 break;
4011
4012 case ERROR:
4013 pthread_mutex_unlock(&mMutex);
4014 handleCameraDeviceError();
4015 return -ENODEV;
4016
4017 default:
4018 LOGE("Invalid state %d", mState);
4019 pthread_mutex_unlock(&mMutex);
4020 return -ENODEV;
4021 }
4022
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004023 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004024 if (rc != NO_ERROR) {
4025 LOGE("incoming request is not valid");
4026 pthread_mutex_unlock(&mMutex);
4027 return rc;
4028 }
4029
4030 meta = request->settings;
4031
4032 // For first capture request, send capture intent, and
4033 // stream on all streams
4034 if (mState == CONFIGURED) {
4035 // send an unconfigure to the backend so that the isp
4036 // resources are deallocated
4037 if (!mFirstConfiguration) {
4038 cam_stream_size_info_t stream_config_info;
4039 int32_t hal_version = CAM_HAL_V3;
4040 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4041 stream_config_info.buffer_info.min_buffers =
4042 MIN_INFLIGHT_REQUESTS;
4043 stream_config_info.buffer_info.max_buffers =
4044 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4045 clear_metadata_buffer(mParameters);
4046 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4047 CAM_INTF_PARM_HAL_VERSION, hal_version);
4048 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4049 CAM_INTF_META_STREAM_INFO, stream_config_info);
4050 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4051 mParameters);
4052 if (rc < 0) {
4053 LOGE("set_parms for unconfigure failed");
4054 pthread_mutex_unlock(&mMutex);
4055 return rc;
4056 }
4057 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004058 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004059 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004060 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004061 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004062 property_get("persist.camera.is_type", is_type_value, "4");
4063 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4064 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4065 property_get("persist.camera.is_type_preview", is_type_value, "4");
4066 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4067 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004068
4069 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4070 int32_t hal_version = CAM_HAL_V3;
4071 uint8_t captureIntent =
4072 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4073 mCaptureIntent = captureIntent;
4074 clear_metadata_buffer(mParameters);
4075 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4076 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4077 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004078 if (mFirstConfiguration) {
4079 // configure instant AEC
4080 // Instant AEC is a session based parameter and it is needed only
4081 // once per complete session after open camera.
4082 // i.e. This is set only once for the first capture request, after open camera.
4083 setInstantAEC(meta);
4084 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004085 uint8_t fwkVideoStabMode=0;
4086 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4087 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4088 }
4089
4090 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4091 // turn it on for video/preview
4092 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4093 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004094 int32_t vsMode;
4095 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4097 rc = BAD_VALUE;
4098 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004099 LOGD("setEis %d", setEis);
4100 bool eis3Supported = false;
4101 size_t count = IS_TYPE_MAX;
4102 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4103 for (size_t i = 0; i < count; i++) {
4104 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4105 eis3Supported = true;
4106 break;
4107 }
4108 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004109
4110 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004111 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004112 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4113 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004114 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4115 is_type = isTypePreview;
4116 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4117 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4118 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004119 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004120 } else {
4121 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004122 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004123 } else {
4124 is_type = IS_TYPE_NONE;
4125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004126 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004127 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004128 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4129 }
4130 }
4131
4132 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4133 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4134
4135 int32_t tintless_value = 1;
4136 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4137 CAM_INTF_PARM_TINTLESS, tintless_value);
4138 //Disable CDS for HFR mode or if DIS/EIS is on.
4139 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4140 //after every configure_stream
4141 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4142 (m_bIsVideo)) {
4143 int32_t cds = CAM_CDS_MODE_OFF;
4144 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4145 CAM_INTF_PARM_CDS_MODE, cds))
4146 LOGE("Failed to disable CDS for HFR mode");
4147
4148 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004149
4150 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4151 uint8_t* use_av_timer = NULL;
4152
4153 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004154 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004155 use_av_timer = &m_debug_avtimer;
4156 }
4157 else{
4158 use_av_timer =
4159 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004160 if (use_av_timer) {
4161 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4162 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004163 }
4164
4165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4166 rc = BAD_VALUE;
4167 }
4168 }
4169
Thierry Strudel3d639192016-09-09 11:52:26 -07004170 setMobicat();
4171
4172 /* Set fps and hfr mode while sending meta stream info so that sensor
4173 * can configure appropriate streaming mode */
4174 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004175 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4176 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004177 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4178 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004179 if (rc == NO_ERROR) {
4180 int32_t max_fps =
4181 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004182 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004183 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4184 }
4185 /* For HFR, more buffers are dequeued upfront to improve the performance */
4186 if (mBatchSize) {
4187 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4188 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4189 }
4190 }
4191 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004192 LOGE("setHalFpsRange failed");
4193 }
4194 }
4195 if (meta.exists(ANDROID_CONTROL_MODE)) {
4196 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4197 rc = extractSceneMode(meta, metaMode, mParameters);
4198 if (rc != NO_ERROR) {
4199 LOGE("extractSceneMode failed");
4200 }
4201 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004202 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004203
Thierry Strudel04e026f2016-10-10 11:27:36 -07004204 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4205 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4206 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4207 rc = setVideoHdrMode(mParameters, vhdr);
4208 if (rc != NO_ERROR) {
4209 LOGE("setVideoHDR is failed");
4210 }
4211 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004212
Thierry Strudel3d639192016-09-09 11:52:26 -07004213 //TODO: validate the arguments, HSV scenemode should have only the
4214 //advertised fps ranges
4215
4216 /*set the capture intent, hal version, tintless, stream info,
4217 *and DIS-enable parameters to the backend*/
4218 LOGD("set_parms META_STREAM_INFO " );
4219 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4220 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004221 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004222 mStreamConfigInfo.type[i],
4223 mStreamConfigInfo.stream_sizes[i].width,
4224 mStreamConfigInfo.stream_sizes[i].height,
4225 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004226 mStreamConfigInfo.format[i],
4227 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004228 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004229
Thierry Strudel3d639192016-09-09 11:52:26 -07004230 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4231 mParameters);
4232 if (rc < 0) {
4233 LOGE("set_parms failed for hal version, stream info");
4234 }
4235
4236 cam_dimension_t sensor_dim;
4237 memset(&sensor_dim, 0, sizeof(sensor_dim));
4238 rc = getSensorOutputSize(sensor_dim);
4239 if (rc != NO_ERROR) {
4240 LOGE("Failed to get sensor output size");
4241 pthread_mutex_unlock(&mMutex);
4242 goto error_exit;
4243 }
4244
4245 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4246 gCamCapability[mCameraId]->active_array_size.height,
4247 sensor_dim.width, sensor_dim.height);
4248
4249 /* Set batchmode before initializing channel. Since registerBuffer
4250 * internally initializes some of the channels, better set batchmode
4251 * even before first register buffer */
4252 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4253 it != mStreamInfo.end(); it++) {
4254 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4255 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4256 && mBatchSize) {
4257 rc = channel->setBatchSize(mBatchSize);
4258 //Disable per frame map unmap for HFR/batchmode case
4259 rc |= channel->setPerFrameMapUnmap(false);
4260 if (NO_ERROR != rc) {
4261 LOGE("Channel init failed %d", rc);
4262 pthread_mutex_unlock(&mMutex);
4263 goto error_exit;
4264 }
4265 }
4266 }
4267
4268 //First initialize all streams
4269 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4270 it != mStreamInfo.end(); it++) {
4271 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4272 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4273 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004274 setEis) {
4275 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4276 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4277 is_type = mStreamConfigInfo.is_type[i];
4278 break;
4279 }
4280 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004281 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004282 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004283 rc = channel->initialize(IS_TYPE_NONE);
4284 }
4285 if (NO_ERROR != rc) {
4286 LOGE("Channel initialization failed %d", rc);
4287 pthread_mutex_unlock(&mMutex);
4288 goto error_exit;
4289 }
4290 }
4291
4292 if (mRawDumpChannel) {
4293 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4294 if (rc != NO_ERROR) {
4295 LOGE("Error: Raw Dump Channel init failed");
4296 pthread_mutex_unlock(&mMutex);
4297 goto error_exit;
4298 }
4299 }
4300 if (mSupportChannel) {
4301 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4302 if (rc < 0) {
4303 LOGE("Support channel initialization failed");
4304 pthread_mutex_unlock(&mMutex);
4305 goto error_exit;
4306 }
4307 }
4308 if (mAnalysisChannel) {
4309 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4310 if (rc < 0) {
4311 LOGE("Analysis channel initialization failed");
4312 pthread_mutex_unlock(&mMutex);
4313 goto error_exit;
4314 }
4315 }
4316 if (mDummyBatchChannel) {
4317 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4318 if (rc < 0) {
4319 LOGE("mDummyBatchChannel setBatchSize failed");
4320 pthread_mutex_unlock(&mMutex);
4321 goto error_exit;
4322 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004323 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004324 if (rc < 0) {
4325 LOGE("mDummyBatchChannel initialization failed");
4326 pthread_mutex_unlock(&mMutex);
4327 goto error_exit;
4328 }
4329 }
4330
4331 // Set bundle info
4332 rc = setBundleInfo();
4333 if (rc < 0) {
4334 LOGE("setBundleInfo failed %d", rc);
4335 pthread_mutex_unlock(&mMutex);
4336 goto error_exit;
4337 }
4338
4339 //update settings from app here
4340 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4341 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4342 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4343 }
4344 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4345 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4346 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4347 }
4348 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4349 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4350 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4351
4352 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4353 (mLinkedCameraId != mCameraId) ) {
4354 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4355 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004356 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004357 goto error_exit;
4358 }
4359 }
4360
4361 // add bundle related cameras
4362 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4363 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004364 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4365 &m_pDualCamCmdPtr->bundle_info;
4366 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004367 if (mIsDeviceLinked)
4368 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4369 else
4370 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4371
4372 pthread_mutex_lock(&gCamLock);
4373
4374 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4375 LOGE("Dualcam: Invalid Session Id ");
4376 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004377 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004378 goto error_exit;
4379 }
4380
4381 if (mIsMainCamera == 1) {
4382 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4383 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004384 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004385 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004386 // related session id should be session id of linked session
4387 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4388 } else {
4389 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4390 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004391 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004392 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004393 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4394 }
4395 pthread_mutex_unlock(&gCamLock);
4396
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004397 rc = mCameraHandle->ops->set_dual_cam_cmd(
4398 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004399 if (rc < 0) {
4400 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004401 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004402 goto error_exit;
4403 }
4404 }
4405
4406 //Then start them.
4407 LOGH("Start META Channel");
4408 rc = mMetadataChannel->start();
4409 if (rc < 0) {
4410 LOGE("META channel start failed");
4411 pthread_mutex_unlock(&mMutex);
4412 goto error_exit;
4413 }
4414
4415 if (mAnalysisChannel) {
4416 rc = mAnalysisChannel->start();
4417 if (rc < 0) {
4418 LOGE("Analysis channel start failed");
4419 mMetadataChannel->stop();
4420 pthread_mutex_unlock(&mMutex);
4421 goto error_exit;
4422 }
4423 }
4424
4425 if (mSupportChannel) {
4426 rc = mSupportChannel->start();
4427 if (rc < 0) {
4428 LOGE("Support channel start failed");
4429 mMetadataChannel->stop();
4430 /* Although support and analysis are mutually exclusive today
4431 adding it in any case for future proofing */
4432 if (mAnalysisChannel) {
4433 mAnalysisChannel->stop();
4434 }
4435 pthread_mutex_unlock(&mMutex);
4436 goto error_exit;
4437 }
4438 }
4439 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4440 it != mStreamInfo.end(); it++) {
4441 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4442 LOGH("Start Processing Channel mask=%d",
4443 channel->getStreamTypeMask());
4444 rc = channel->start();
4445 if (rc < 0) {
4446 LOGE("channel start failed");
4447 pthread_mutex_unlock(&mMutex);
4448 goto error_exit;
4449 }
4450 }
4451
4452 if (mRawDumpChannel) {
4453 LOGD("Starting raw dump stream");
4454 rc = mRawDumpChannel->start();
4455 if (rc != NO_ERROR) {
4456 LOGE("Error Starting Raw Dump Channel");
4457 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4458 it != mStreamInfo.end(); it++) {
4459 QCamera3Channel *channel =
4460 (QCamera3Channel *)(*it)->stream->priv;
4461 LOGH("Stopping Processing Channel mask=%d",
4462 channel->getStreamTypeMask());
4463 channel->stop();
4464 }
4465 if (mSupportChannel)
4466 mSupportChannel->stop();
4467 if (mAnalysisChannel) {
4468 mAnalysisChannel->stop();
4469 }
4470 mMetadataChannel->stop();
4471 pthread_mutex_unlock(&mMutex);
4472 goto error_exit;
4473 }
4474 }
4475
4476 if (mChannelHandle) {
4477
4478 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4479 mChannelHandle);
4480 if (rc != NO_ERROR) {
4481 LOGE("start_channel failed %d", rc);
4482 pthread_mutex_unlock(&mMutex);
4483 goto error_exit;
4484 }
4485 }
4486
4487 goto no_error;
4488error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004489 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004490 return rc;
4491no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004492 mWokenUpByDaemon = false;
4493 mPendingLiveRequest = 0;
4494 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004495 }
4496
4497 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004498 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004499
4500 if (mFlushPerf) {
4501 //we cannot accept any requests during flush
4502 LOGE("process_capture_request cannot proceed during flush");
4503 pthread_mutex_unlock(&mMutex);
4504 return NO_ERROR; //should return an error
4505 }
4506
4507 if (meta.exists(ANDROID_REQUEST_ID)) {
4508 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4509 mCurrentRequestId = request_id;
4510 LOGD("Received request with id: %d", request_id);
4511 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4512 LOGE("Unable to find request id field, \
4513 & no previous id available");
4514 pthread_mutex_unlock(&mMutex);
4515 return NAME_NOT_FOUND;
4516 } else {
4517 LOGD("Re-using old request id");
4518 request_id = mCurrentRequestId;
4519 }
4520
4521 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4522 request->num_output_buffers,
4523 request->input_buffer,
4524 frameNumber);
4525 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004526 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004527 int blob_request = 0;
4528 uint32_t snapshotStreamId = 0;
4529 for (size_t i = 0; i < request->num_output_buffers; i++) {
4530 const camera3_stream_buffer_t& output = request->output_buffers[i];
4531 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4532
4533 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004534 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004535 blob_request = 1;
4536 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4537 }
4538
4539 if (output.acquire_fence != -1) {
4540 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4541 close(output.acquire_fence);
4542 if (rc != OK) {
4543 LOGE("sync wait failed %d", rc);
4544 pthread_mutex_unlock(&mMutex);
4545 return rc;
4546 }
4547 }
4548
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004549 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004550 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004551
4552 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4553 isVidBufRequested = true;
4554 }
4555 }
4556
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004557 //FIXME: Add checks to ensure no dups in validateCaptureRequest
4558 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4559 itr++) {
4560 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4561 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4562 channel->getStreamID(channel->getStreamTypeMask());
4563
4564 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4565 isVidBufRequested = true;
4566 }
4567 }
4568
Thierry Strudel3d639192016-09-09 11:52:26 -07004569 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004570 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004571 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004572 }
4573 if (blob_request && mRawDumpChannel) {
4574 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004575 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004576 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004577 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004578 }
4579
4580 if(request->input_buffer == NULL) {
4581 /* Parse the settings:
4582 * - For every request in NORMAL MODE
4583 * - For every request in HFR mode during preview only case
4584 * - For first request of every batch in HFR mode during video
4585 * recording. In batchmode the same settings except frame number is
4586 * repeated in each request of the batch.
4587 */
4588 if (!mBatchSize ||
4589 (mBatchSize && !isVidBufRequested) ||
4590 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004591 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004592 if (rc < 0) {
4593 LOGE("fail to set frame parameters");
4594 pthread_mutex_unlock(&mMutex);
4595 return rc;
4596 }
4597 }
4598 /* For batchMode HFR, setFrameParameters is not called for every
4599 * request. But only frame number of the latest request is parsed.
4600 * Keep track of first and last frame numbers in a batch so that
4601 * metadata for the frame numbers of batch can be duplicated in
4602 * handleBatchMetadata */
4603 if (mBatchSize) {
4604 if (!mToBeQueuedVidBufs) {
4605 //start of the batch
4606 mFirstFrameNumberInBatch = request->frame_number;
4607 }
4608 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4609 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4610 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004611 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004612 return BAD_VALUE;
4613 }
4614 }
4615 if (mNeedSensorRestart) {
4616 /* Unlock the mutex as restartSensor waits on the channels to be
4617 * stopped, which in turn calls stream callback functions -
4618 * handleBufferWithLock and handleMetadataWithLock */
4619 pthread_mutex_unlock(&mMutex);
4620 rc = dynamicUpdateMetaStreamInfo();
4621 if (rc != NO_ERROR) {
4622 LOGE("Restarting the sensor failed");
4623 return BAD_VALUE;
4624 }
4625 mNeedSensorRestart = false;
4626 pthread_mutex_lock(&mMutex);
4627 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004628 if(mResetInstantAEC) {
4629 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4630 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4631 mResetInstantAEC = false;
4632 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004633 } else {
4634
4635 if (request->input_buffer->acquire_fence != -1) {
4636 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4637 close(request->input_buffer->acquire_fence);
4638 if (rc != OK) {
4639 LOGE("input buffer sync wait failed %d", rc);
4640 pthread_mutex_unlock(&mMutex);
4641 return rc;
4642 }
4643 }
4644 }
4645
4646 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4647 mLastCustIntentFrmNum = frameNumber;
4648 }
4649 /* Update pending request list and pending buffers map */
4650 PendingRequestInfo pendingRequest;
4651 pendingRequestIterator latestRequest;
4652 pendingRequest.frame_number = frameNumber;
4653 pendingRequest.num_buffers = request->num_output_buffers;
4654 pendingRequest.request_id = request_id;
4655 pendingRequest.blob_request = blob_request;
4656 pendingRequest.timestamp = 0;
4657 pendingRequest.bUrgentReceived = 0;
4658 if (request->input_buffer) {
4659 pendingRequest.input_buffer =
4660 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4661 *(pendingRequest.input_buffer) = *(request->input_buffer);
4662 pInputBuffer = pendingRequest.input_buffer;
4663 } else {
4664 pendingRequest.input_buffer = NULL;
4665 pInputBuffer = NULL;
4666 }
4667
4668 pendingRequest.pipeline_depth = 0;
4669 pendingRequest.partial_result_cnt = 0;
4670 extractJpegMetadata(mCurJpegMeta, request);
4671 pendingRequest.jpegMetadata = mCurJpegMeta;
4672 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4673 pendingRequest.shutter_notified = false;
4674
4675 //extract capture intent
4676 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4677 mCaptureIntent =
4678 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4679 }
4680 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004681 /* DevCamDebug metadata processCaptureRequest */
4682 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4683 mDevCamDebugMetaEnable =
4684 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4685 }
4686 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4687 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004688
4689 //extract CAC info
4690 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4691 mCacMode =
4692 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4693 }
4694 pendingRequest.fwkCacMode = mCacMode;
4695
4696 PendingBuffersInRequest bufsForCurRequest;
4697 bufsForCurRequest.frame_number = frameNumber;
4698 // Mark current timestamp for the new request
4699 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4700
4701 for (size_t i = 0; i < request->num_output_buffers; i++) {
4702 RequestedBufferInfo requestedBuf;
4703 memset(&requestedBuf, 0, sizeof(requestedBuf));
4704 requestedBuf.stream = request->output_buffers[i].stream;
4705 requestedBuf.buffer = NULL;
4706 pendingRequest.buffers.push_back(requestedBuf);
4707
4708 // Add to buffer handle the pending buffers list
4709 PendingBufferInfo bufferInfo;
4710 bufferInfo.buffer = request->output_buffers[i].buffer;
4711 bufferInfo.stream = request->output_buffers[i].stream;
4712 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4713 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4714 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4715 frameNumber, bufferInfo.buffer,
4716 channel->getStreamTypeMask(), bufferInfo.stream->format);
4717 }
4718 // Add this request packet into mPendingBuffersMap
4719 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4720 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4721 mPendingBuffersMap.get_num_overall_buffers());
4722
4723 latestRequest = mPendingRequestsList.insert(
4724 mPendingRequestsList.end(), pendingRequest);
4725 if(mFlush) {
4726 LOGI("mFlush is true");
4727 pthread_mutex_unlock(&mMutex);
4728 return NO_ERROR;
4729 }
4730
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004731 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004732 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004733 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004734
4735 if(request->input_buffer != NULL){
4736 LOGD("Input request, frame_number %d", frameNumber);
4737 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4738 if (NO_ERROR != rc) {
4739 LOGE("fail to set reproc parameters");
4740 pthread_mutex_unlock(&mMutex);
4741 return rc;
4742 }
4743 }
4744
4745 // Call request on other streams
4746 uint32_t streams_need_metadata = 0;
4747 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
4748 for (size_t i = 0; i < request->num_output_buffers; i++) {
4749 const camera3_stream_buffer_t& output = request->output_buffers[i];
4750 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4751
4752 if (channel == NULL) {
4753 LOGW("invalid channel pointer for stream");
4754 continue;
4755 }
4756
4757 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
4758 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
4759 output.buffer, request->input_buffer, frameNumber);
4760 if(request->input_buffer != NULL){
4761 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004762 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004763 if (rc < 0) {
4764 LOGE("Fail to request on picture channel");
4765 pthread_mutex_unlock(&mMutex);
4766 return rc;
4767 }
4768 } else {
4769 LOGD("snapshot request with buffer %p, frame_number %d",
4770 output.buffer, frameNumber);
4771 if (!request->settings) {
4772 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004773 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004774 } else {
4775 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004776 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004777 }
4778 if (rc < 0) {
4779 LOGE("Fail to request on picture channel");
4780 pthread_mutex_unlock(&mMutex);
4781 return rc;
4782 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004783
4784 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4785 uint32_t j = 0;
4786 for (j = 0; j < streamsArray.num_streams; j++) {
4787 if (streamsArray.stream_request[j].streamID == streamId) {
4788 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4789 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4790 else
4791 streamsArray.stream_request[j].buf_index = indexUsed;
4792 break;
4793 }
4794 }
4795 if (j == streamsArray.num_streams) {
4796 LOGE("Did not find matching stream to update index");
4797 assert(0);
4798 }
4799
Thierry Strudel3d639192016-09-09 11:52:26 -07004800 pendingBufferIter->need_metadata = true;
4801 streams_need_metadata++;
4802 }
4803 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
4804 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
4806 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004807 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
4808 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 if (rc < 0) {
4810 LOGE("Fail to request on YUV channel");
4811 pthread_mutex_unlock(&mMutex);
4812 return rc;
4813 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004814
4815 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4816 uint32_t j = 0;
4817 for (j = 0; j < streamsArray.num_streams; j++) {
4818 if (streamsArray.stream_request[j].streamID == streamId) {
4819 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4820 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4821 else
4822 streamsArray.stream_request[j].buf_index = indexUsed;
4823 break;
4824 }
4825 }
4826 if (j == streamsArray.num_streams) {
4827 LOGE("Did not find matching stream to update index");
4828 assert(0);
4829 }
4830
Thierry Strudel3d639192016-09-09 11:52:26 -07004831 pendingBufferIter->need_metadata = needMetadata;
4832 if (needMetadata)
4833 streams_need_metadata += 1;
4834 LOGD("calling YUV channel request, need_metadata is %d",
4835 needMetadata);
4836 } else {
4837 LOGD("request with buffer %p, frame_number %d",
4838 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004839
4840 rc = channel->request(output.buffer, frameNumber, indexUsed);
4841
4842 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4843 uint32_t j = 0;
4844 for (j = 0; j < streamsArray.num_streams; j++) {
4845 if (streamsArray.stream_request[j].streamID == streamId) {
4846 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4847 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4848 else
4849 streamsArray.stream_request[j].buf_index = indexUsed;
4850 break;
4851 }
4852 }
4853 if (j == streamsArray.num_streams) {
4854 LOGE("Did not find matching stream to update index");
4855 assert(0);
4856 }
4857
Thierry Strudel3d639192016-09-09 11:52:26 -07004858 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4859 && mBatchSize) {
4860 mToBeQueuedVidBufs++;
4861 if (mToBeQueuedVidBufs == mBatchSize) {
4862 channel->queueBatchBuf();
4863 }
4864 }
4865 if (rc < 0) {
4866 LOGE("request failed");
4867 pthread_mutex_unlock(&mMutex);
4868 return rc;
4869 }
4870 }
4871 pendingBufferIter++;
4872 }
4873
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004874 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4875 itr++) {
4876 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4877
4878 if (channel == NULL) {
4879 LOGE("invalid channel pointer for stream");
4880 assert(0);
4881 return BAD_VALUE;
4882 }
4883
4884 InternalRequest requestedStream;
4885 requestedStream = (*itr);
4886
4887
4888 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
4889 LOGD("snapshot request internally input buffer %p, frame_number %d",
4890 request->input_buffer, frameNumber);
4891 if(request->input_buffer != NULL){
4892 rc = channel->request(NULL, frameNumber,
4893 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
4894 if (rc < 0) {
4895 LOGE("Fail to request on picture channel");
4896 pthread_mutex_unlock(&mMutex);
4897 return rc;
4898 }
4899 } else {
4900 LOGD("snapshot request with frame_number %d", frameNumber);
4901 if (!request->settings) {
4902 rc = channel->request(NULL, frameNumber,
4903 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
4904 } else {
4905 rc = channel->request(NULL, frameNumber,
4906 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
4907 }
4908 if (rc < 0) {
4909 LOGE("Fail to request on picture channel");
4910 pthread_mutex_unlock(&mMutex);
4911 return rc;
4912 }
4913
4914 if ((*itr).meteringOnly != 1) {
4915 requestedStream.need_metadata = 1;
4916 streams_need_metadata++;
4917 }
4918 }
4919
4920 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
4921 uint32_t j = 0;
4922 for (j = 0; j < streamsArray.num_streams; j++) {
4923 if (streamsArray.stream_request[j].streamID == streamId) {
4924 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
4925 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
4926 else
4927 streamsArray.stream_request[j].buf_index = indexUsed;
4928 break;
4929 }
4930 }
4931 if (j == streamsArray.num_streams) {
4932 LOGE("Did not find matching stream to update index");
4933 assert(0);
4934 }
4935
4936 } else {
4937 LOGE("Internal requests not supported on this stream type");
4938 assert(0);
4939 return INVALID_OPERATION;
4940 }
4941 latestRequest->internalRequestList.push_back(requestedStream);
4942 }
4943
Thierry Strudel3d639192016-09-09 11:52:26 -07004944 //If 2 streams have need_metadata set to true, fail the request, unless
4945 //we copy/reference count the metadata buffer
4946 if (streams_need_metadata > 1) {
4947 LOGE("not supporting request in which two streams requires"
4948 " 2 HAL metadata for reprocessing");
4949 pthread_mutex_unlock(&mMutex);
4950 return -EINVAL;
4951 }
4952
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004953 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004954 /* Set the parameters to backend:
4955 * - For every request in NORMAL MODE
4956 * - For every request in HFR mode during preview only case
4957 * - Once every batch in HFR mode during video recording
4958 */
4959 if (!mBatchSize ||
4960 (mBatchSize && !isVidBufRequested) ||
4961 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
4962 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
4963 mBatchSize, isVidBufRequested,
4964 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004965
4966 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
4967 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
4968 uint32_t m = 0;
4969 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
4970 if (streamsArray.stream_request[k].streamID ==
4971 mBatchedStreamsArray.stream_request[m].streamID)
4972 break;
4973 }
4974 if (m == mBatchedStreamsArray.num_streams) {
4975 mBatchedStreamsArray.stream_request\
4976 [mBatchedStreamsArray.num_streams].streamID =
4977 streamsArray.stream_request[k].streamID;
4978 mBatchedStreamsArray.stream_request\
4979 [mBatchedStreamsArray.num_streams].buf_index =
4980 streamsArray.stream_request[k].buf_index;
4981 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
4982 }
4983 }
4984 streamsArray = mBatchedStreamsArray;
4985 }
4986 /* Update stream id of all the requested buffers */
4987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
4988 LOGE("Failed to set stream type mask in the parameters");
4989 return BAD_VALUE;
4990 }
4991
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4993 mParameters);
4994 if (rc < 0) {
4995 LOGE("set_parms failed");
4996 }
4997 /* reset to zero coz, the batch is queued */
4998 mToBeQueuedVidBufs = 0;
4999 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005000 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5001 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
5002 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5003 uint32_t m = 0;
5004 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5005 if (streamsArray.stream_request[k].streamID ==
5006 mBatchedStreamsArray.stream_request[m].streamID)
5007 break;
5008 }
5009 if (m == mBatchedStreamsArray.num_streams) {
5010 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
5011 streamsArray.stream_request[k].streamID;
5012 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
5013 streamsArray.stream_request[k].buf_index;
5014 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5015 }
5016 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 }
5018 mPendingLiveRequest++;
5019 }
5020
5021 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5022
5023 mState = STARTED;
5024 // Added a timed condition wait
5025 struct timespec ts;
5026 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005027 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005028 if (rc < 0) {
5029 isValidTimeout = 0;
5030 LOGE("Error reading the real time clock!!");
5031 }
5032 else {
5033 // Make timeout as 5 sec for request to be honored
5034 ts.tv_sec += 5;
5035 }
5036 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005037 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005038 (mState != ERROR) && (mState != DEINIT)) {
5039 if (!isValidTimeout) {
5040 LOGD("Blocking on conditional wait");
5041 pthread_cond_wait(&mRequestCond, &mMutex);
5042 }
5043 else {
5044 LOGD("Blocking on timed conditional wait");
5045 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5046 if (rc == ETIMEDOUT) {
5047 rc = -ENODEV;
5048 LOGE("Unblocked on timeout!!!!");
5049 break;
5050 }
5051 }
5052 LOGD("Unblocked");
5053 if (mWokenUpByDaemon) {
5054 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005055 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 break;
5057 }
5058 }
5059 pthread_mutex_unlock(&mMutex);
5060
5061 return rc;
5062}
5063
5064/*===========================================================================
5065 * FUNCTION : dump
5066 *
5067 * DESCRIPTION:
5068 *
5069 * PARAMETERS :
5070 *
5071 *
5072 * RETURN :
5073 *==========================================================================*/
5074void QCamera3HardwareInterface::dump(int fd)
5075{
5076 pthread_mutex_lock(&mMutex);
5077 dprintf(fd, "\n Camera HAL3 information Begin \n");
5078
5079 dprintf(fd, "\nNumber of pending requests: %zu \n",
5080 mPendingRequestsList.size());
5081 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5082 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5083 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5084 for(pendingRequestIterator i = mPendingRequestsList.begin();
5085 i != mPendingRequestsList.end(); i++) {
5086 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5087 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5088 i->input_buffer);
5089 }
5090 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5091 mPendingBuffersMap.get_num_overall_buffers());
5092 dprintf(fd, "-------+------------------\n");
5093 dprintf(fd, " Frame | Stream type mask \n");
5094 dprintf(fd, "-------+------------------\n");
5095 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5096 for(auto &j : req.mPendingBufferList) {
5097 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5098 dprintf(fd, " %5d | %11d \n",
5099 req.frame_number, channel->getStreamTypeMask());
5100 }
5101 }
5102 dprintf(fd, "-------+------------------\n");
5103
5104 dprintf(fd, "\nPending frame drop list: %zu\n",
5105 mPendingFrameDropList.size());
5106 dprintf(fd, "-------+-----------\n");
5107 dprintf(fd, " Frame | Stream ID \n");
5108 dprintf(fd, "-------+-----------\n");
5109 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5110 i != mPendingFrameDropList.end(); i++) {
5111 dprintf(fd, " %5d | %9d \n",
5112 i->frame_number, i->stream_ID);
5113 }
5114 dprintf(fd, "-------+-----------\n");
5115
5116 dprintf(fd, "\n Camera HAL3 information End \n");
5117
5118 /* use dumpsys media.camera as trigger to send update debug level event */
5119 mUpdateDebugLevel = true;
5120 pthread_mutex_unlock(&mMutex);
5121 return;
5122}
5123
5124/*===========================================================================
5125 * FUNCTION : flush
5126 *
5127 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5128 * conditionally restarts channels
5129 *
5130 * PARAMETERS :
5131 * @ restartChannels: re-start all channels
5132 *
5133 *
5134 * RETURN :
5135 * 0 on success
5136 * Error code on failure
5137 *==========================================================================*/
5138int QCamera3HardwareInterface::flush(bool restartChannels)
5139{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005140 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005141 int32_t rc = NO_ERROR;
5142
5143 LOGD("Unblocking Process Capture Request");
5144 pthread_mutex_lock(&mMutex);
5145 mFlush = true;
5146 pthread_mutex_unlock(&mMutex);
5147
5148 rc = stopAllChannels();
5149 // unlink of dualcam
5150 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005151 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5152 &m_pDualCamCmdPtr->bundle_info;
5153 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005154 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5155 pthread_mutex_lock(&gCamLock);
5156
5157 if (mIsMainCamera == 1) {
5158 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5159 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005160 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005161 // related session id should be session id of linked session
5162 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5163 } else {
5164 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5165 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005166 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005167 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5168 }
5169 pthread_mutex_unlock(&gCamLock);
5170
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005171 rc = mCameraHandle->ops->set_dual_cam_cmd(
5172 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005173 if (rc < 0) {
5174 LOGE("Dualcam: Unlink failed, but still proceed to close");
5175 }
5176 }
5177
5178 if (rc < 0) {
5179 LOGE("stopAllChannels failed");
5180 return rc;
5181 }
5182 if (mChannelHandle) {
5183 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5184 mChannelHandle);
5185 }
5186
5187 // Reset bundle info
5188 rc = setBundleInfo();
5189 if (rc < 0) {
5190 LOGE("setBundleInfo failed %d", rc);
5191 return rc;
5192 }
5193
5194 // Mutex Lock
5195 pthread_mutex_lock(&mMutex);
5196
5197 // Unblock process_capture_request
5198 mPendingLiveRequest = 0;
5199 pthread_cond_signal(&mRequestCond);
5200
5201 rc = notifyErrorForPendingRequests();
5202 if (rc < 0) {
5203 LOGE("notifyErrorForPendingRequests failed");
5204 pthread_mutex_unlock(&mMutex);
5205 return rc;
5206 }
5207
5208 mFlush = false;
5209
5210 // Start the Streams/Channels
5211 if (restartChannels) {
5212 rc = startAllChannels();
5213 if (rc < 0) {
5214 LOGE("startAllChannels failed");
5215 pthread_mutex_unlock(&mMutex);
5216 return rc;
5217 }
5218 }
5219
5220 if (mChannelHandle) {
5221 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5222 mChannelHandle);
5223 if (rc < 0) {
5224 LOGE("start_channel failed");
5225 pthread_mutex_unlock(&mMutex);
5226 return rc;
5227 }
5228 }
5229
5230 pthread_mutex_unlock(&mMutex);
5231
5232 return 0;
5233}
5234
5235/*===========================================================================
5236 * FUNCTION : flushPerf
5237 *
5238 * DESCRIPTION: This is the performance optimization version of flush that does
5239 * not use stream off, rather flushes the system
5240 *
5241 * PARAMETERS :
5242 *
5243 *
5244 * RETURN : 0 : success
5245 * -EINVAL: input is malformed (device is not valid)
5246 * -ENODEV: if the device has encountered a serious error
5247 *==========================================================================*/
5248int QCamera3HardwareInterface::flushPerf()
5249{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005250 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005251 int32_t rc = 0;
5252 struct timespec timeout;
5253 bool timed_wait = false;
5254
5255 pthread_mutex_lock(&mMutex);
5256 mFlushPerf = true;
5257 mPendingBuffersMap.numPendingBufsAtFlush =
5258 mPendingBuffersMap.get_num_overall_buffers();
5259 LOGD("Calling flush. Wait for %d buffers to return",
5260 mPendingBuffersMap.numPendingBufsAtFlush);
5261
5262 /* send the flush event to the backend */
5263 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5264 if (rc < 0) {
5265 LOGE("Error in flush: IOCTL failure");
5266 mFlushPerf = false;
5267 pthread_mutex_unlock(&mMutex);
5268 return -ENODEV;
5269 }
5270
5271 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5272 LOGD("No pending buffers in HAL, return flush");
5273 mFlushPerf = false;
5274 pthread_mutex_unlock(&mMutex);
5275 return rc;
5276 }
5277
5278 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005279 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005280 if (rc < 0) {
5281 LOGE("Error reading the real time clock, cannot use timed wait");
5282 } else {
5283 timeout.tv_sec += FLUSH_TIMEOUT;
5284 timed_wait = true;
5285 }
5286
5287 //Block on conditional variable
5288 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5289 LOGD("Waiting on mBuffersCond");
5290 if (!timed_wait) {
5291 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5292 if (rc != 0) {
5293 LOGE("pthread_cond_wait failed due to rc = %s",
5294 strerror(rc));
5295 break;
5296 }
5297 } else {
5298 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5299 if (rc != 0) {
5300 LOGE("pthread_cond_timedwait failed due to rc = %s",
5301 strerror(rc));
5302 break;
5303 }
5304 }
5305 }
5306 if (rc != 0) {
5307 mFlushPerf = false;
5308 pthread_mutex_unlock(&mMutex);
5309 return -ENODEV;
5310 }
5311
5312 LOGD("Received buffers, now safe to return them");
5313
5314 //make sure the channels handle flush
5315 //currently only required for the picture channel to release snapshot resources
5316 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5317 it != mStreamInfo.end(); it++) {
5318 QCamera3Channel *channel = (*it)->channel;
5319 if (channel) {
5320 rc = channel->flush();
5321 if (rc) {
5322 LOGE("Flushing the channels failed with error %d", rc);
5323 // even though the channel flush failed we need to continue and
5324 // return the buffers we have to the framework, however the return
5325 // value will be an error
5326 rc = -ENODEV;
5327 }
5328 }
5329 }
5330
5331 /* notify the frameworks and send errored results */
5332 rc = notifyErrorForPendingRequests();
5333 if (rc < 0) {
5334 LOGE("notifyErrorForPendingRequests failed");
5335 pthread_mutex_unlock(&mMutex);
5336 return rc;
5337 }
5338
5339 //unblock process_capture_request
5340 mPendingLiveRequest = 0;
5341 unblockRequestIfNecessary();
5342
5343 mFlushPerf = false;
5344 pthread_mutex_unlock(&mMutex);
5345 LOGD ("Flush Operation complete. rc = %d", rc);
5346 return rc;
5347}
5348
5349/*===========================================================================
5350 * FUNCTION : handleCameraDeviceError
5351 *
5352 * DESCRIPTION: This function calls internal flush and notifies the error to
5353 * framework and updates the state variable.
5354 *
5355 * PARAMETERS : None
5356 *
5357 * RETURN : NO_ERROR on Success
5358 * Error code on failure
5359 *==========================================================================*/
5360int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5361{
5362 int32_t rc = NO_ERROR;
5363
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005364 {
5365 Mutex::Autolock lock(mFlushLock);
5366 pthread_mutex_lock(&mMutex);
5367 if (mState != ERROR) {
5368 //if mState != ERROR, nothing to be done
5369 pthread_mutex_unlock(&mMutex);
5370 return NO_ERROR;
5371 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005372 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005373
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005374 rc = flush(false /* restart channels */);
5375 if (NO_ERROR != rc) {
5376 LOGE("internal flush to handle mState = ERROR failed");
5377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005378
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005379 pthread_mutex_lock(&mMutex);
5380 mState = DEINIT;
5381 pthread_mutex_unlock(&mMutex);
5382 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005383
5384 camera3_notify_msg_t notify_msg;
5385 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5386 notify_msg.type = CAMERA3_MSG_ERROR;
5387 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5388 notify_msg.message.error.error_stream = NULL;
5389 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005390 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005391
5392 return rc;
5393}
5394
5395/*===========================================================================
5396 * FUNCTION : captureResultCb
5397 *
5398 * DESCRIPTION: Callback handler for all capture result
5399 * (streams, as well as metadata)
5400 *
5401 * PARAMETERS :
5402 * @metadata : metadata information
5403 * @buffer : actual gralloc buffer to be returned to frameworks.
5404 * NULL if metadata.
5405 *
5406 * RETURN : NONE
5407 *==========================================================================*/
5408void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5409 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5410{
5411 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005412 pthread_mutex_lock(&mMutex);
5413 uint8_t batchSize = mBatchSize;
5414 pthread_mutex_unlock(&mMutex);
5415 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005416 handleBatchMetadata(metadata_buf,
5417 true /* free_and_bufdone_meta_buf */);
5418 } else { /* mBatchSize = 0 */
5419 hdrPlusPerfLock(metadata_buf);
5420 pthread_mutex_lock(&mMutex);
5421 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005422 true /* free_and_bufdone_meta_buf */,
5423 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005424 pthread_mutex_unlock(&mMutex);
5425 }
5426 } else if (isInputBuffer) {
5427 pthread_mutex_lock(&mMutex);
5428 handleInputBufferWithLock(frame_number);
5429 pthread_mutex_unlock(&mMutex);
5430 } else {
5431 pthread_mutex_lock(&mMutex);
5432 handleBufferWithLock(buffer, frame_number);
5433 pthread_mutex_unlock(&mMutex);
5434 }
5435 return;
5436}
5437
5438/*===========================================================================
5439 * FUNCTION : getReprocessibleOutputStreamId
5440 *
5441 * DESCRIPTION: Get source output stream id for the input reprocess stream
5442 * based on size and format, which would be the largest
5443 * output stream if an input stream exists.
5444 *
5445 * PARAMETERS :
5446 * @id : return the stream id if found
5447 *
5448 * RETURN : int32_t type of status
5449 * NO_ERROR -- success
5450 * none-zero failure code
5451 *==========================================================================*/
5452int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5453{
5454 /* check if any output or bidirectional stream with the same size and format
5455 and return that stream */
5456 if ((mInputStreamInfo.dim.width > 0) &&
5457 (mInputStreamInfo.dim.height > 0)) {
5458 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5459 it != mStreamInfo.end(); it++) {
5460
5461 camera3_stream_t *stream = (*it)->stream;
5462 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5463 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5464 (stream->format == mInputStreamInfo.format)) {
5465 // Usage flag for an input stream and the source output stream
5466 // may be different.
5467 LOGD("Found reprocessible output stream! %p", *it);
5468 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5469 stream->usage, mInputStreamInfo.usage);
5470
5471 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5472 if (channel != NULL && channel->mStreams[0]) {
5473 id = channel->mStreams[0]->getMyServerID();
5474 return NO_ERROR;
5475 }
5476 }
5477 }
5478 } else {
5479 LOGD("No input stream, so no reprocessible output stream");
5480 }
5481 return NAME_NOT_FOUND;
5482}
5483
5484/*===========================================================================
5485 * FUNCTION : lookupFwkName
5486 *
5487 * DESCRIPTION: In case the enum is not same in fwk and backend
5488 * make sure the parameter is correctly propogated
5489 *
5490 * PARAMETERS :
5491 * @arr : map between the two enums
5492 * @len : len of the map
5493 * @hal_name : name of the hal_parm to map
5494 *
5495 * RETURN : int type of status
5496 * fwk_name -- success
5497 * none-zero failure code
5498 *==========================================================================*/
5499template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5500 size_t len, halType hal_name)
5501{
5502
5503 for (size_t i = 0; i < len; i++) {
5504 if (arr[i].hal_name == hal_name) {
5505 return arr[i].fwk_name;
5506 }
5507 }
5508
5509 /* Not able to find matching framework type is not necessarily
5510 * an error case. This happens when mm-camera supports more attributes
5511 * than the frameworks do */
5512 LOGH("Cannot find matching framework type");
5513 return NAME_NOT_FOUND;
5514}
5515
5516/*===========================================================================
5517 * FUNCTION : lookupHalName
5518 *
5519 * DESCRIPTION: In case the enum is not same in fwk and backend
5520 * make sure the parameter is correctly propogated
5521 *
5522 * PARAMETERS :
5523 * @arr : map between the two enums
5524 * @len : len of the map
5525 * @fwk_name : name of the hal_parm to map
5526 *
5527 * RETURN : int32_t type of status
5528 * hal_name -- success
5529 * none-zero failure code
5530 *==========================================================================*/
5531template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5532 size_t len, fwkType fwk_name)
5533{
5534 for (size_t i = 0; i < len; i++) {
5535 if (arr[i].fwk_name == fwk_name) {
5536 return arr[i].hal_name;
5537 }
5538 }
5539
5540 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5541 return NAME_NOT_FOUND;
5542}
5543
5544/*===========================================================================
5545 * FUNCTION : lookupProp
5546 *
5547 * DESCRIPTION: lookup a value by its name
5548 *
5549 * PARAMETERS :
5550 * @arr : map between the two enums
5551 * @len : size of the map
5552 * @name : name to be looked up
5553 *
5554 * RETURN : Value if found
5555 * CAM_CDS_MODE_MAX if not found
5556 *==========================================================================*/
5557template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5558 size_t len, const char *name)
5559{
5560 if (name) {
5561 for (size_t i = 0; i < len; i++) {
5562 if (!strcmp(arr[i].desc, name)) {
5563 return arr[i].val;
5564 }
5565 }
5566 }
5567 return CAM_CDS_MODE_MAX;
5568}
5569
5570/*===========================================================================
5571 *
5572 * DESCRIPTION:
5573 *
5574 * PARAMETERS :
5575 * @metadata : metadata information from callback
5576 * @timestamp: metadata buffer timestamp
5577 * @request_id: request id
5578 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005579 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5580 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005581 * @pprocDone: whether internal offline postprocsesing is done
5582 *
5583 * RETURN : camera_metadata_t*
5584 * metadata in a format specified by fwk
5585 *==========================================================================*/
5586camera_metadata_t*
5587QCamera3HardwareInterface::translateFromHalMetadata(
5588 metadata_buffer_t *metadata,
5589 nsecs_t timestamp,
5590 int32_t request_id,
5591 const CameraMetadata& jpegMetadata,
5592 uint8_t pipeline_depth,
5593 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005594 /* DevCamDebug metadata translateFromHalMetadata argument */
5595 uint8_t DevCamDebug_meta_enable,
5596 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005597 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005598 uint8_t fwk_cacMode,
5599 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005600{
5601 CameraMetadata camMetadata;
5602 camera_metadata_t *resultMetadata;
5603
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005604 if (mBatchSize && !firstMetadataInBatch) {
5605 /* In batch mode, use cached metadata from the first metadata
5606 in the batch */
5607 camMetadata.clear();
5608 camMetadata = mCachedMetadata;
5609 }
5610
Thierry Strudel3d639192016-09-09 11:52:26 -07005611 if (jpegMetadata.entryCount())
5612 camMetadata.append(jpegMetadata);
5613
5614 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5615 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5616 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5617 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005618 if (mBatchSize == 0) {
5619 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5620 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5621 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005622
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005623 if (mBatchSize && !firstMetadataInBatch) {
5624 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5625 resultMetadata = camMetadata.release();
5626 return resultMetadata;
5627 }
5628
Samuel Ha68ba5172016-12-15 18:41:12 -08005629 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5630 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
5631 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5632 // DevCamDebug metadata translateFromHalMetadata AF
5633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5634 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5635 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5636 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5637 }
5638 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5639 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5640 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5641 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5642 }
5643 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5644 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5645 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5646 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5647 }
5648 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5649 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5650 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5651 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5652 }
5653 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5654 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5655 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5656 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5657 }
5658 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5659 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5660 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5661 *DevCamDebug_af_monitor_pdaf_target_pos;
5662 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5663 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5664 }
5665 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5666 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5667 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5668 *DevCamDebug_af_monitor_pdaf_confidence;
5669 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5670 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5671 }
5672 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5673 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5674 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5675 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5676 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5677 }
5678 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5679 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5680 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5681 *DevCamDebug_af_monitor_tof_target_pos;
5682 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5683 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5684 }
5685 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5686 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5687 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5688 *DevCamDebug_af_monitor_tof_confidence;
5689 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5690 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5691 }
5692 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5693 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5694 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5695 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5696 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5697 }
5698 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5699 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5700 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5701 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5702 &fwk_DevCamDebug_af_monitor_type_select, 1);
5703 }
5704 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5705 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5706 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5707 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5708 &fwk_DevCamDebug_af_monitor_refocus, 1);
5709 }
5710 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5711 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5712 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5713 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5714 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5715 }
5716 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5717 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5718 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5719 *DevCamDebug_af_search_pdaf_target_pos;
5720 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5721 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5722 }
5723 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5724 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5725 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5726 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5727 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5728 }
5729 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5730 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5731 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5732 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5733 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5734 }
5735 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5736 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5737 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5738 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5739 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5740 }
5741 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5742 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5743 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5744 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5745 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
5746 }
5747 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
5748 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
5749 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
5750 *DevCamDebug_af_search_tof_target_pos;
5751 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
5752 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
5753 }
5754 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
5755 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
5756 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
5757 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
5758 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
5759 }
5760 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
5761 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
5762 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
5763 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
5764 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
5765 }
5766 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
5767 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
5768 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
5769 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
5770 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
5771 }
5772 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
5773 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
5774 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
5775 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
5776 &fwk_DevCamDebug_af_search_tof_confidence, 1);
5777 }
5778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
5779 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
5780 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
5781 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
5782 &fwk_DevCamDebug_af_search_type_select, 1);
5783 }
5784 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
5785 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
5786 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
5787 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
5788 &fwk_DevCamDebug_af_search_next_pos, 1);
5789 }
5790 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
5791 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
5792 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
5793 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
5794 &fwk_DevCamDebug_af_search_target_pos, 1);
5795 }
5796 // DevCamDebug metadata translateFromHalMetadata AEC
5797 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
5798 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
5799 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
5800 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
5801 }
5802 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
5803 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
5804 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
5805 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
5806 }
5807 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
5808 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
5809 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
5810 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
5811 }
5812 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
5813 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
5814 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
5815 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
5816 }
5817 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
5818 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
5819 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
5820 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
5821 }
5822 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
5823 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
5824 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
5825 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
5826 }
5827 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
5828 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
5829 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
5830 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
5831 }
5832 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
5833 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
5834 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
5835 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
5836 }
5837 // DevCamDebug metadata translateFromHalMetadata AWB
5838 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
5839 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
5840 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
5841 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
5842 }
5843 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
5844 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
5845 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
5846 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
5847 }
5848 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
5849 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
5850 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
5851 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
5852 }
5853 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
5854 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
5855 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
5856 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
5857 }
5858 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
5859 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
5860 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
5861 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
5862 }
5863 }
5864 // atrace_end(ATRACE_TAG_ALWAYS);
5865
Thierry Strudel3d639192016-09-09 11:52:26 -07005866 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
5867 int64_t fwk_frame_number = *frame_number;
5868 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
5869 }
5870
5871 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
5872 int32_t fps_range[2];
5873 fps_range[0] = (int32_t)float_range->min_fps;
5874 fps_range[1] = (int32_t)float_range->max_fps;
5875 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
5876 fps_range, 2);
5877 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
5878 fps_range[0], fps_range[1]);
5879 }
5880
5881 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
5882 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
5883 }
5884
5885 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5886 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
5887 METADATA_MAP_SIZE(SCENE_MODES_MAP),
5888 *sceneMode);
5889 if (NAME_NOT_FOUND != val) {
5890 uint8_t fwkSceneMode = (uint8_t)val;
5891 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
5892 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
5893 fwkSceneMode);
5894 }
5895 }
5896
5897 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
5898 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
5899 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
5900 }
5901
5902 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
5903 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
5904 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
5905 }
5906
5907 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
5908 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
5909 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
5910 }
5911
5912 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
5913 CAM_INTF_META_EDGE_MODE, metadata) {
5914 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
5915 }
5916
5917 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
5918 uint8_t fwk_flashPower = (uint8_t) *flashPower;
5919 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
5920 }
5921
5922 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
5923 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
5924 }
5925
5926 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
5927 if (0 <= *flashState) {
5928 uint8_t fwk_flashState = (uint8_t) *flashState;
5929 if (!gCamCapability[mCameraId]->flash_available) {
5930 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
5931 }
5932 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
5933 }
5934 }
5935
5936 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
5937 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
5938 if (NAME_NOT_FOUND != val) {
5939 uint8_t fwk_flashMode = (uint8_t)val;
5940 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
5941 }
5942 }
5943
5944 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
5945 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
5946 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
5947 }
5948
5949 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
5950 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
5951 }
5952
5953 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
5954 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
5955 }
5956
5957 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
5958 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
5959 }
5960
5961 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
5962 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
5963 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
5964 }
5965
5966 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
5967 uint8_t fwk_videoStab = (uint8_t) *videoStab;
5968 LOGD("fwk_videoStab = %d", fwk_videoStab);
5969 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
5970 } else {
5971 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
5972 // and so hardcoding the Video Stab result to OFF mode.
5973 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
5974 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005975 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07005976 }
5977
5978 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
5979 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
5980 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
5981 }
5982
5983 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
5984 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
5985 }
5986
Thierry Strudel3d639192016-09-09 11:52:26 -07005987 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
5988 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005989 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07005990
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005991 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
5992 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07005993
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005994 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07005995 blackLevelAppliedPattern->cam_black_level[0],
5996 blackLevelAppliedPattern->cam_black_level[1],
5997 blackLevelAppliedPattern->cam_black_level[2],
5998 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07005999 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6000 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006001
6002#ifndef USE_HAL_3_3
6003 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006004 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6005 // depth space.
6006 fwk_blackLevelInd[0] /= 4.0;
6007 fwk_blackLevelInd[1] /= 4.0;
6008 fwk_blackLevelInd[2] /= 4.0;
6009 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006010 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6011 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006012#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006013 }
6014
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006015#ifndef USE_HAL_3_3
6016 // Fixed whitelevel is used by ISP/Sensor
6017 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6018 &gCamCapability[mCameraId]->white_level, 1);
6019#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006020
6021 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6022 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6023 int32_t scalerCropRegion[4];
6024 scalerCropRegion[0] = hScalerCropRegion->left;
6025 scalerCropRegion[1] = hScalerCropRegion->top;
6026 scalerCropRegion[2] = hScalerCropRegion->width;
6027 scalerCropRegion[3] = hScalerCropRegion->height;
6028
6029 // Adjust crop region from sensor output coordinate system to active
6030 // array coordinate system.
6031 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6032 scalerCropRegion[2], scalerCropRegion[3]);
6033
6034 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6035 }
6036
6037 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6038 LOGD("sensorExpTime = %lld", *sensorExpTime);
6039 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6040 }
6041
6042 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6043 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6044 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6045 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6046 }
6047
6048 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6049 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6050 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6051 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6052 sensorRollingShutterSkew, 1);
6053 }
6054
6055 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6056 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6057 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6058
6059 //calculate the noise profile based on sensitivity
6060 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6061 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6062 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6063 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6064 noise_profile[i] = noise_profile_S;
6065 noise_profile[i+1] = noise_profile_O;
6066 }
6067 LOGD("noise model entry (S, O) is (%f, %f)",
6068 noise_profile_S, noise_profile_O);
6069 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6070 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6071 }
6072
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006073#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006074 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006075 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006076 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006077 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006078 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6079 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6080 }
6081 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006082#endif
6083
Thierry Strudel3d639192016-09-09 11:52:26 -07006084 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6085 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6086 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6087 }
6088
6089 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6090 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6091 *faceDetectMode);
6092 if (NAME_NOT_FOUND != val) {
6093 uint8_t fwk_faceDetectMode = (uint8_t)val;
6094 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6095
6096 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6097 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6098 CAM_INTF_META_FACE_DETECTION, metadata) {
6099 uint8_t numFaces = MIN(
6100 faceDetectionInfo->num_faces_detected, MAX_ROI);
6101 int32_t faceIds[MAX_ROI];
6102 uint8_t faceScores[MAX_ROI];
6103 int32_t faceRectangles[MAX_ROI * 4];
6104 int32_t faceLandmarks[MAX_ROI * 6];
6105 size_t j = 0, k = 0;
6106
6107 for (size_t i = 0; i < numFaces; i++) {
6108 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6109 // Adjust crop region from sensor output coordinate system to active
6110 // array coordinate system.
6111 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6112 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6113 rect.width, rect.height);
6114
6115 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6116 faceRectangles+j, -1);
6117
6118 j+= 4;
6119 }
6120 if (numFaces <= 0) {
6121 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6122 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6123 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6124 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6125 }
6126
6127 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6128 numFaces);
6129 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6130 faceRectangles, numFaces * 4U);
6131 if (fwk_faceDetectMode ==
6132 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6133 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6134 CAM_INTF_META_FACE_LANDMARK, metadata) {
6135
6136 for (size_t i = 0; i < numFaces; i++) {
6137 // Map the co-ordinate sensor output coordinate system to active
6138 // array coordinate system.
6139 mCropRegionMapper.toActiveArray(
6140 landmarks->face_landmarks[i].left_eye_center.x,
6141 landmarks->face_landmarks[i].left_eye_center.y);
6142 mCropRegionMapper.toActiveArray(
6143 landmarks->face_landmarks[i].right_eye_center.x,
6144 landmarks->face_landmarks[i].right_eye_center.y);
6145 mCropRegionMapper.toActiveArray(
6146 landmarks->face_landmarks[i].mouth_center.x,
6147 landmarks->face_landmarks[i].mouth_center.y);
6148
6149 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006150 k+= TOTAL_LANDMARK_INDICES;
6151 }
6152 } else {
6153 for (size_t i = 0; i < numFaces; i++) {
6154 setInvalidLandmarks(faceLandmarks+k);
6155 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006156 }
6157 }
6158
6159 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6160 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6161 faceLandmarks, numFaces * 6U);
6162 }
6163 }
6164 }
6165 }
6166 }
6167
6168 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6169 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6170 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006171
6172 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6173 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6174 // process histogram statistics info
6175 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6176 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6177 cam_histogram_data_t rHistData, gHistData, bHistData;
6178 memset(&rHistData, 0, sizeof(rHistData));
6179 memset(&gHistData, 0, sizeof(gHistData));
6180 memset(&bHistData, 0, sizeof(bHistData));
6181
6182 switch (stats_data->type) {
6183 case CAM_HISTOGRAM_TYPE_BAYER:
6184 switch (stats_data->bayer_stats.data_type) {
6185 case CAM_STATS_CHANNEL_GR:
6186 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6187 break;
6188 case CAM_STATS_CHANNEL_GB:
6189 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6190 break;
6191 case CAM_STATS_CHANNEL_B:
6192 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6193 break;
6194 case CAM_STATS_CHANNEL_ALL:
6195 rHistData = stats_data->bayer_stats.r_stats;
6196 //Framework expects only 3 channels. So, for now,
6197 //use gb stats for G channel.
6198 gHistData = stats_data->bayer_stats.gb_stats;
6199 bHistData = stats_data->bayer_stats.b_stats;
6200 break;
6201 case CAM_STATS_CHANNEL_Y:
6202 case CAM_STATS_CHANNEL_R:
6203 default:
6204 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6205 break;
6206 }
6207 break;
6208 case CAM_HISTOGRAM_TYPE_YUV:
6209 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6210 break;
6211 }
6212
6213 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6214 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6215 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6216
6217 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6218 }
6219 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006220 }
6221
6222 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6223 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6224 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6225 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6226 }
6227
6228 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6229 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6230 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6231 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6232 }
6233
6234 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6235 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6236 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6237 CAM_MAX_SHADING_MAP_HEIGHT);
6238 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6239 CAM_MAX_SHADING_MAP_WIDTH);
6240 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6241 lensShadingMap->lens_shading, 4U * map_width * map_height);
6242 }
6243
6244 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6245 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6246 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6247 }
6248
6249 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6250 //Populate CAM_INTF_META_TONEMAP_CURVES
6251 /* ch0 = G, ch 1 = B, ch 2 = R*/
6252 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6253 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6254 tonemap->tonemap_points_cnt,
6255 CAM_MAX_TONEMAP_CURVE_SIZE);
6256 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6257 }
6258
6259 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6260 &tonemap->curves[0].tonemap_points[0][0],
6261 tonemap->tonemap_points_cnt * 2);
6262
6263 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6264 &tonemap->curves[1].tonemap_points[0][0],
6265 tonemap->tonemap_points_cnt * 2);
6266
6267 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6268 &tonemap->curves[2].tonemap_points[0][0],
6269 tonemap->tonemap_points_cnt * 2);
6270 }
6271
6272 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6273 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6274 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6275 CC_GAIN_MAX);
6276 }
6277
6278 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6279 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6280 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6281 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6282 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6283 }
6284
6285 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6286 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6287 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6288 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6289 toneCurve->tonemap_points_cnt,
6290 CAM_MAX_TONEMAP_CURVE_SIZE);
6291 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6292 }
6293 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6294 (float*)toneCurve->curve.tonemap_points,
6295 toneCurve->tonemap_points_cnt * 2);
6296 }
6297
6298 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6299 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6300 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6301 predColorCorrectionGains->gains, 4);
6302 }
6303
6304 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6305 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6306 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6307 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6308 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6309 }
6310
6311 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6312 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6313 }
6314
6315 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6316 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6317 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6318 }
6319
6320 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6321 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6322 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6323 }
6324
6325 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6326 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6327 *effectMode);
6328 if (NAME_NOT_FOUND != val) {
6329 uint8_t fwk_effectMode = (uint8_t)val;
6330 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6331 }
6332 }
6333
6334 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6335 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6336 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6337 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6338 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6339 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6340 }
6341 int32_t fwk_testPatternData[4];
6342 fwk_testPatternData[0] = testPatternData->r;
6343 fwk_testPatternData[3] = testPatternData->b;
6344 switch (gCamCapability[mCameraId]->color_arrangement) {
6345 case CAM_FILTER_ARRANGEMENT_RGGB:
6346 case CAM_FILTER_ARRANGEMENT_GRBG:
6347 fwk_testPatternData[1] = testPatternData->gr;
6348 fwk_testPatternData[2] = testPatternData->gb;
6349 break;
6350 case CAM_FILTER_ARRANGEMENT_GBRG:
6351 case CAM_FILTER_ARRANGEMENT_BGGR:
6352 fwk_testPatternData[2] = testPatternData->gr;
6353 fwk_testPatternData[1] = testPatternData->gb;
6354 break;
6355 default:
6356 LOGE("color arrangement %d is not supported",
6357 gCamCapability[mCameraId]->color_arrangement);
6358 break;
6359 }
6360 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6361 }
6362
6363 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6364 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6365 }
6366
6367 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6368 String8 str((const char *)gps_methods);
6369 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6370 }
6371
6372 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6373 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6374 }
6375
6376 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6377 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6378 }
6379
6380 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6381 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6382 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6383 }
6384
6385 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6386 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6387 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6388 }
6389
6390 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6391 int32_t fwk_thumb_size[2];
6392 fwk_thumb_size[0] = thumb_size->width;
6393 fwk_thumb_size[1] = thumb_size->height;
6394 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6395 }
6396
6397 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6398 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6399 privateData,
6400 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6401 }
6402
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006403 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6404 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6405 meteringMode, 1);
6406 }
6407
Thierry Strudel3d639192016-09-09 11:52:26 -07006408 if (metadata->is_tuning_params_valid) {
6409 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6410 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6411 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6412
6413
6414 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6415 sizeof(uint32_t));
6416 data += sizeof(uint32_t);
6417
6418 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6419 sizeof(uint32_t));
6420 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6421 data += sizeof(uint32_t);
6422
6423 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6424 sizeof(uint32_t));
6425 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6426 data += sizeof(uint32_t);
6427
6428 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6429 sizeof(uint32_t));
6430 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6431 data += sizeof(uint32_t);
6432
6433 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6434 sizeof(uint32_t));
6435 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6436 data += sizeof(uint32_t);
6437
6438 metadata->tuning_params.tuning_mod3_data_size = 0;
6439 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6440 sizeof(uint32_t));
6441 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6442 data += sizeof(uint32_t);
6443
6444 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6445 TUNING_SENSOR_DATA_MAX);
6446 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6447 count);
6448 data += count;
6449
6450 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6451 TUNING_VFE_DATA_MAX);
6452 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6453 count);
6454 data += count;
6455
6456 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6457 TUNING_CPP_DATA_MAX);
6458 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6459 count);
6460 data += count;
6461
6462 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6463 TUNING_CAC_DATA_MAX);
6464 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6465 count);
6466 data += count;
6467
6468 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6469 (int32_t *)(void *)tuning_meta_data_blob,
6470 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6471 }
6472
6473 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6474 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6475 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6476 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6477 NEUTRAL_COL_POINTS);
6478 }
6479
6480 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6481 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6482 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6483 }
6484
6485 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6486 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6487 // Adjust crop region from sensor output coordinate system to active
6488 // array coordinate system.
6489 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6490 hAeRegions->rect.width, hAeRegions->rect.height);
6491
6492 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6493 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6494 REGIONS_TUPLE_COUNT);
6495 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6496 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6497 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6498 hAeRegions->rect.height);
6499 }
6500
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006501 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6502 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6503 if (NAME_NOT_FOUND != val) {
6504 uint8_t fwkAfMode = (uint8_t)val;
6505 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6506 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6507 } else {
6508 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6509 val);
6510 }
6511 }
6512
Thierry Strudel3d639192016-09-09 11:52:26 -07006513 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6514 uint8_t fwk_afState = (uint8_t) *afState;
6515 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006516 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006517 }
6518
6519 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6520 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6521 }
6522
6523 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6524 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6525 }
6526
6527 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6528 uint8_t fwk_lensState = *lensState;
6529 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6530 }
6531
6532 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6533 /*af regions*/
6534 int32_t afRegions[REGIONS_TUPLE_COUNT];
6535 // Adjust crop region from sensor output coordinate system to active
6536 // array coordinate system.
6537 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6538 hAfRegions->rect.width, hAfRegions->rect.height);
6539
6540 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6541 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6542 REGIONS_TUPLE_COUNT);
6543 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6544 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6545 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6546 hAfRegions->rect.height);
6547 }
6548
6549 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006550 uint32_t ab_mode = *hal_ab_mode;
6551 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6552 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6553 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6554 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006555 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006556 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006557 if (NAME_NOT_FOUND != val) {
6558 uint8_t fwk_ab_mode = (uint8_t)val;
6559 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6560 }
6561 }
6562
6563 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6564 int val = lookupFwkName(SCENE_MODES_MAP,
6565 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6566 if (NAME_NOT_FOUND != val) {
6567 uint8_t fwkBestshotMode = (uint8_t)val;
6568 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6569 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6570 } else {
6571 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6572 }
6573 }
6574
6575 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6576 uint8_t fwk_mode = (uint8_t) *mode;
6577 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6578 }
6579
6580 /* Constant metadata values to be update*/
6581 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6582 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6583
6584 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6585 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6586
6587 int32_t hotPixelMap[2];
6588 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6589
6590 // CDS
6591 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6592 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6593 }
6594
Thierry Strudel04e026f2016-10-10 11:27:36 -07006595 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6596 int32_t fwk_hdr;
6597 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6598 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6599 } else {
6600 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6601 }
6602 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6603 }
6604
6605 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006606 int32_t fwk_ir = (int32_t) *ir;
6607 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006608 }
6609
Thierry Strudel269c81a2016-10-12 12:13:59 -07006610 // AEC SPEED
6611 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6612 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6613 }
6614
6615 // AWB SPEED
6616 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6617 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6618 }
6619
Thierry Strudel3d639192016-09-09 11:52:26 -07006620 // TNR
6621 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6622 uint8_t tnr_enable = tnr->denoise_enable;
6623 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6624
6625 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6626 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6627 }
6628
6629 // Reprocess crop data
6630 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6631 uint8_t cnt = crop_data->num_of_streams;
6632 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6633 // mm-qcamera-daemon only posts crop_data for streams
6634 // not linked to pproc. So no valid crop metadata is not
6635 // necessarily an error case.
6636 LOGD("No valid crop metadata entries");
6637 } else {
6638 uint32_t reproc_stream_id;
6639 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6640 LOGD("No reprocessible stream found, ignore crop data");
6641 } else {
6642 int rc = NO_ERROR;
6643 Vector<int32_t> roi_map;
6644 int32_t *crop = new int32_t[cnt*4];
6645 if (NULL == crop) {
6646 rc = NO_MEMORY;
6647 }
6648 if (NO_ERROR == rc) {
6649 int32_t streams_found = 0;
6650 for (size_t i = 0; i < cnt; i++) {
6651 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6652 if (pprocDone) {
6653 // HAL already does internal reprocessing,
6654 // either via reprocessing before JPEG encoding,
6655 // or offline postprocessing for pproc bypass case.
6656 crop[0] = 0;
6657 crop[1] = 0;
6658 crop[2] = mInputStreamInfo.dim.width;
6659 crop[3] = mInputStreamInfo.dim.height;
6660 } else {
6661 crop[0] = crop_data->crop_info[i].crop.left;
6662 crop[1] = crop_data->crop_info[i].crop.top;
6663 crop[2] = crop_data->crop_info[i].crop.width;
6664 crop[3] = crop_data->crop_info[i].crop.height;
6665 }
6666 roi_map.add(crop_data->crop_info[i].roi_map.left);
6667 roi_map.add(crop_data->crop_info[i].roi_map.top);
6668 roi_map.add(crop_data->crop_info[i].roi_map.width);
6669 roi_map.add(crop_data->crop_info[i].roi_map.height);
6670 streams_found++;
6671 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6672 crop[0], crop[1], crop[2], crop[3]);
6673 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6674 crop_data->crop_info[i].roi_map.left,
6675 crop_data->crop_info[i].roi_map.top,
6676 crop_data->crop_info[i].roi_map.width,
6677 crop_data->crop_info[i].roi_map.height);
6678 break;
6679
6680 }
6681 }
6682 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6683 &streams_found, 1);
6684 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6685 crop, (size_t)(streams_found * 4));
6686 if (roi_map.array()) {
6687 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6688 roi_map.array(), roi_map.size());
6689 }
6690 }
6691 if (crop) {
6692 delete [] crop;
6693 }
6694 }
6695 }
6696 }
6697
6698 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6699 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
6700 // so hardcoding the CAC result to OFF mode.
6701 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6702 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6703 } else {
6704 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6705 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6706 *cacMode);
6707 if (NAME_NOT_FOUND != val) {
6708 uint8_t resultCacMode = (uint8_t)val;
6709 // check whether CAC result from CB is equal to Framework set CAC mode
6710 // If not equal then set the CAC mode came in corresponding request
6711 if (fwk_cacMode != resultCacMode) {
6712 resultCacMode = fwk_cacMode;
6713 }
6714 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6715 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6716 } else {
6717 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6718 }
6719 }
6720 }
6721
6722 // Post blob of cam_cds_data through vendor tag.
6723 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6724 uint8_t cnt = cdsInfo->num_of_streams;
6725 cam_cds_data_t cdsDataOverride;
6726 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6727 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6728 cdsDataOverride.num_of_streams = 1;
6729 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6730 uint32_t reproc_stream_id;
6731 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6732 LOGD("No reprocessible stream found, ignore cds data");
6733 } else {
6734 for (size_t i = 0; i < cnt; i++) {
6735 if (cdsInfo->cds_info[i].stream_id ==
6736 reproc_stream_id) {
6737 cdsDataOverride.cds_info[0].cds_enable =
6738 cdsInfo->cds_info[i].cds_enable;
6739 break;
6740 }
6741 }
6742 }
6743 } else {
6744 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6745 }
6746 camMetadata.update(QCAMERA3_CDS_INFO,
6747 (uint8_t *)&cdsDataOverride,
6748 sizeof(cam_cds_data_t));
6749 }
6750
6751 // Ldaf calibration data
6752 if (!mLdafCalibExist) {
6753 IF_META_AVAILABLE(uint32_t, ldafCalib,
6754 CAM_INTF_META_LDAF_EXIF, metadata) {
6755 mLdafCalibExist = true;
6756 mLdafCalib[0] = ldafCalib[0];
6757 mLdafCalib[1] = ldafCalib[1];
6758 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
6759 ldafCalib[0], ldafCalib[1]);
6760 }
6761 }
6762
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006763 // Reprocess and DDM debug data through vendor tag
6764 cam_reprocess_info_t repro_info;
6765 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006766 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
6767 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006768 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006769 }
6770 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
6771 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006772 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006773 }
6774 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
6775 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006776 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006777 }
6778 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
6779 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006780 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006781 }
6782 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
6783 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006784 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006785 }
6786 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006787 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006788 }
6789 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
6790 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006791 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006792 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006793 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
6794 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
6795 }
6796 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
6797 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
6798 }
6799 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
6800 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07006801
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006802 // INSTANT AEC MODE
6803 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
6804 CAM_INTF_PARM_INSTANT_AEC, metadata) {
6805 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
6806 }
6807
Shuzhen Wange763e802016-03-31 10:24:29 -07006808 // AF scene change
6809 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
6810 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
6811 }
6812
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006813 /* In batch mode, cache the first metadata in the batch */
6814 if (mBatchSize && firstMetadataInBatch) {
6815 mCachedMetadata.clear();
6816 mCachedMetadata = camMetadata;
6817 }
6818
Thierry Strudel3d639192016-09-09 11:52:26 -07006819 resultMetadata = camMetadata.release();
6820 return resultMetadata;
6821}
6822
6823/*===========================================================================
6824 * FUNCTION : saveExifParams
6825 *
6826 * DESCRIPTION:
6827 *
6828 * PARAMETERS :
6829 * @metadata : metadata information from callback
6830 *
6831 * RETURN : none
6832 *
6833 *==========================================================================*/
void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
{
    // Cache per-frame 3A/statistics EXIF debug blobs from the HAL metadata
    // callback into mExifParams.debug_params (retrieved later through
    // get3AExifParams(), presumably when composing JPEG EXIF debug data).
    // Each section below is independent: it copies its blob and raises the
    // matching *_valid flag only when the blob is present in this buffer
    // AND the debug-params storage has been allocated.

    // AE (auto exposure) debug info
    IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
            mExifParams.debug_params->ae_debug_params_valid = TRUE;
        }
    }
    // AWB (auto white balance) debug info
    IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
            mExifParams.debug_params->awb_debug_params_valid = TRUE;
        }
    }
    // AF (auto focus) debug info
    IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
            mExifParams.debug_params->af_debug_params_valid = TRUE;
        }
    }
    // ASD (auto scene detection) debug info
    IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
            mExifParams.debug_params->asd_debug_params_valid = TRUE;
        }
    }
    // General statistics debug info
    IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
            mExifParams.debug_params->stats_debug_params_valid = TRUE;
        }
    }
    // BE-stats debug info
    IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
            mExifParams.debug_params->bestats_debug_params_valid = TRUE;
        }
    }
    // Histogram (bhist) debug info
    IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
            mExifParams.debug_params->bhist_debug_params_valid = TRUE;
        }
    }
    // 3A tuning debug info
    IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
            CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
        if (mExifParams.debug_params) {
            mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
            mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
        }
    }
}
6893
6894/*===========================================================================
6895 * FUNCTION : get3AExifParams
6896 *
6897 * DESCRIPTION:
6898 *
6899 * PARAMETERS : none
6900 *
6901 *
6902 * RETURN : mm_jpeg_exif_params_t
6903 *
6904 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returns the cached 3A EXIF parameters by value.
    // NOTE(review): this is a shallow copy — debug_params looks like a raw
    // pointer (it is dereferenced in saveExifParams), so the returned copy
    // presumably shares the same debug-params storage; confirm lifetime
    // expectations with callers.
    return mExifParams;
}
6909
6910/*===========================================================================
6911 * FUNCTION : translateCbUrgentMetadataToResultMetadata
6912 *
6913 * DESCRIPTION:
6914 *
6915 * PARAMETERS :
6916 * @metadata : metadata information from callback
6917 *
6918 * RETURN : camera_metadata_t*
6919 * metadata in a format specified by fwk
6920 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: the HAL's uint32_t enum is narrowed to the uint8_t the
    // framework tag expects.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // Echo the AE precapture trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    // AE state, narrowed to uint8_t for the framework tag.
    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Echo the AF trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: map the HAL enum to the framework enum via the lookup
    // table; skip the update entirely when there is no mapping.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // Deduce ANDROID_CONTROL_AE_MODE from three independent HAL fields.
    // Each starts at an "unset" sentinel and is overwritten only if the
    // corresponding entry is present in this metadata buffer.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    // Precedence: red-eye reduction wins, then flash AUTO/ON (mapped through
    // the AE flash-mode table), then plain AE ON/OFF. If none of the three
    // fields yields a conclusive mode, no AE_MODE tag is published.
    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                redeye, flashMode, aeMode);
    }
    // Instant-AEC bookkeeping: while instant AEC is active, count frames
    // until AEC reports settled (or the display-skip frame bound is hit),
    // then clear mInstantAEC and raise mResetInstantAEC so the reset is
    // acted upon elsewhere (handler not visible in this function).
    if (mInstantAEC) {
        // Increment frame Idx count until a bound reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    // Ownership of the backing camera_metadata_t transfers to the caller.
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7031
7032/*===========================================================================
7033 * FUNCTION : dumpMetadataToFile
7034 *
7035 * DESCRIPTION: Dumps tuning metadata to file system
7036 *
7037 * PARAMETERS :
7038 * @meta : tuning metadata
7039 * @dumpFrameCount : current dump frame count
7040 * @enabled : Enable mask
7041 *
7042 *==========================================================================*/
7043void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7044 uint32_t &dumpFrameCount,
7045 bool enabled,
7046 const char *type,
7047 uint32_t frameNumber)
7048{
7049 //Some sanity checks
7050 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7051 LOGE("Tuning sensor data size bigger than expected %d: %d",
7052 meta.tuning_sensor_data_size,
7053 TUNING_SENSOR_DATA_MAX);
7054 return;
7055 }
7056
7057 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7058 LOGE("Tuning VFE data size bigger than expected %d: %d",
7059 meta.tuning_vfe_data_size,
7060 TUNING_VFE_DATA_MAX);
7061 return;
7062 }
7063
7064 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7065 LOGE("Tuning CPP data size bigger than expected %d: %d",
7066 meta.tuning_cpp_data_size,
7067 TUNING_CPP_DATA_MAX);
7068 return;
7069 }
7070
7071 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7072 LOGE("Tuning CAC data size bigger than expected %d: %d",
7073 meta.tuning_cac_data_size,
7074 TUNING_CAC_DATA_MAX);
7075 return;
7076 }
7077 //
7078
7079 if(enabled){
7080 char timeBuf[FILENAME_MAX];
7081 char buf[FILENAME_MAX];
7082 memset(buf, 0, sizeof(buf));
7083 memset(timeBuf, 0, sizeof(timeBuf));
7084 time_t current_time;
7085 struct tm * timeinfo;
7086 time (&current_time);
7087 timeinfo = localtime (&current_time);
7088 if (timeinfo != NULL) {
7089 strftime (timeBuf, sizeof(timeBuf),
7090 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7091 }
7092 String8 filePath(timeBuf);
7093 snprintf(buf,
7094 sizeof(buf),
7095 "%dm_%s_%d.bin",
7096 dumpFrameCount,
7097 type,
7098 frameNumber);
7099 filePath.append(buf);
7100 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7101 if (file_fd >= 0) {
7102 ssize_t written_len = 0;
7103 meta.tuning_data_version = TUNING_DATA_VERSION;
7104 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7105 written_len += write(file_fd, data, sizeof(uint32_t));
7106 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7107 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7108 written_len += write(file_fd, data, sizeof(uint32_t));
7109 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7110 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7111 written_len += write(file_fd, data, sizeof(uint32_t));
7112 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7113 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7114 written_len += write(file_fd, data, sizeof(uint32_t));
7115 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7116 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7117 written_len += write(file_fd, data, sizeof(uint32_t));
7118 meta.tuning_mod3_data_size = 0;
7119 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7120 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7121 written_len += write(file_fd, data, sizeof(uint32_t));
7122 size_t total_size = meta.tuning_sensor_data_size;
7123 data = (void *)((uint8_t *)&meta.data);
7124 written_len += write(file_fd, data, total_size);
7125 total_size = meta.tuning_vfe_data_size;
7126 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7127 written_len += write(file_fd, data, total_size);
7128 total_size = meta.tuning_cpp_data_size;
7129 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7130 written_len += write(file_fd, data, total_size);
7131 total_size = meta.tuning_cac_data_size;
7132 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7133 written_len += write(file_fd, data, total_size);
7134 close(file_fd);
7135 }else {
7136 LOGE("fail to open file for metadata dumping");
7137 }
7138 }
7139}
7140
7141/*===========================================================================
7142 * FUNCTION : cleanAndSortStreamInfo
7143 *
7144 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7145 * and sort them such that raw stream is at the end of the list
7146 * This is a workaround for camera daemon constraint.
7147 *
7148 * PARAMETERS : None
7149 *
7150 *==========================================================================*/
7151void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7152{
7153 List<stream_info_t *> newStreamInfo;
7154
7155 /*clean up invalid streams*/
7156 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7157 it != mStreamInfo.end();) {
7158 if(((*it)->status) == INVALID){
7159 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7160 delete channel;
7161 free(*it);
7162 it = mStreamInfo.erase(it);
7163 } else {
7164 it++;
7165 }
7166 }
7167
7168 // Move preview/video/callback/snapshot streams into newList
7169 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7170 it != mStreamInfo.end();) {
7171 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7172 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7173 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7174 newStreamInfo.push_back(*it);
7175 it = mStreamInfo.erase(it);
7176 } else
7177 it++;
7178 }
7179 // Move raw streams into newList
7180 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7181 it != mStreamInfo.end();) {
7182 newStreamInfo.push_back(*it);
7183 it = mStreamInfo.erase(it);
7184 }
7185
7186 mStreamInfo = newStreamInfo;
7187}
7188
7189/*===========================================================================
7190 * FUNCTION : extractJpegMetadata
7191 *
7192 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7193 * JPEG metadata is cached in HAL, and return as part of capture
7194 * result when metadata is returned from camera daemon.
7195 *
7196 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7197 * @request: capture request
7198 *
7199 *==========================================================================*/
7200void QCamera3HardwareInterface::extractJpegMetadata(
7201 CameraMetadata& jpegMetadata,
7202 const camera3_capture_request_t *request)
7203{
7204 CameraMetadata frame_settings;
7205 frame_settings = request->settings;
7206
7207 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7208 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7209 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7210 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7211
7212 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7213 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7214 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7215 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7216
7217 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7218 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7219 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7220 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7221
7222 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7223 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7224 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7225 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7226
7227 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7228 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7229 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7230 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7231
7232 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7233 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7234 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7235 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7236
7237 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7238 int32_t thumbnail_size[2];
7239 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7240 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7241 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7242 int32_t orientation =
7243 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007245 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7246 int32_t temp;
7247 temp = thumbnail_size[0];
7248 thumbnail_size[0] = thumbnail_size[1];
7249 thumbnail_size[1] = temp;
7250 }
7251 }
7252 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7253 thumbnail_size,
7254 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7255 }
7256
7257}
7258
7259/*===========================================================================
7260 * FUNCTION : convertToRegions
7261 *
7262 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7263 *
7264 * PARAMETERS :
7265 * @rect : cam_rect_t struct to convert
7266 * @region : int32_t destination array
7267 * @weight : if we are converting from cam_area_t, weight is valid
7268 * else weight = -1
7269 *
7270 *==========================================================================*/
7271void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7272 int32_t *region, int weight)
7273{
7274 region[0] = rect.left;
7275 region[1] = rect.top;
7276 region[2] = rect.left + rect.width;
7277 region[3] = rect.top + rect.height;
7278 if (weight > -1) {
7279 region[4] = weight;
7280 }
7281}
7282
7283/*===========================================================================
7284 * FUNCTION : convertFromRegions
7285 *
7286 * DESCRIPTION: helper method to convert from array to cam_rect_t
7287 *
 * PARAMETERS :
 *   @roi      : cam_area_t destination, filled from the metadata entry
 *   @settings : capture request settings containing the region tag
 *   @tag      : metadata tag whose payload is
 *               [x_min, y_min, x_max, y_max, weight]
7293 *
7294 *==========================================================================*/
7295void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7296 const camera_metadata_t *settings, uint32_t tag)
7297{
7298 CameraMetadata frame_settings;
7299 frame_settings = settings;
7300 int32_t x_min = frame_settings.find(tag).data.i32[0];
7301 int32_t y_min = frame_settings.find(tag).data.i32[1];
7302 int32_t x_max = frame_settings.find(tag).data.i32[2];
7303 int32_t y_max = frame_settings.find(tag).data.i32[3];
7304 roi.weight = frame_settings.find(tag).data.i32[4];
7305 roi.rect.left = x_min;
7306 roi.rect.top = y_min;
7307 roi.rect.width = x_max - x_min;
7308 roi.rect.height = y_max - y_min;
7309}
7310
7311/*===========================================================================
7312 * FUNCTION : resetIfNeededROI
7313 *
7314 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7315 * crop region
7316 *
7317 * PARAMETERS :
7318 * @roi : cam_area_t struct to resize
7319 * @scalerCropRegion : cam_crop_region_t region to compare against
7320 *
7321 *
7322 *==========================================================================*/
7323bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7324 const cam_crop_region_t* scalerCropRegion)
7325{
7326 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7327 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7328 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7329 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7330
7331 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7332 * without having this check the calculations below to validate if the roi
7333 * is inside scalar crop region will fail resulting in the roi not being
7334 * reset causing algorithm to continue to use stale roi window
7335 */
7336 if (roi->weight == 0) {
7337 return true;
7338 }
7339
7340 if ((roi_x_max < scalerCropRegion->left) ||
7341 // right edge of roi window is left of scalar crop's left edge
7342 (roi_y_max < scalerCropRegion->top) ||
7343 // bottom edge of roi window is above scalar crop's top edge
7344 (roi->rect.left > crop_x_max) ||
7345 // left edge of roi window is beyond(right) of scalar crop's right edge
7346 (roi->rect.top > crop_y_max)){
7347 // top edge of roi windo is above scalar crop's top edge
7348 return false;
7349 }
7350 if (roi->rect.left < scalerCropRegion->left) {
7351 roi->rect.left = scalerCropRegion->left;
7352 }
7353 if (roi->rect.top < scalerCropRegion->top) {
7354 roi->rect.top = scalerCropRegion->top;
7355 }
7356 if (roi_x_max > crop_x_max) {
7357 roi_x_max = crop_x_max;
7358 }
7359 if (roi_y_max > crop_y_max) {
7360 roi_y_max = crop_y_max;
7361 }
7362 roi->rect.width = roi_x_max - roi->rect.left;
7363 roi->rect.height = roi_y_max - roi->rect.top;
7364 return true;
7365}
7366
7367/*===========================================================================
7368 * FUNCTION : convertLandmarks
7369 *
7370 * DESCRIPTION: helper method to extract the landmarks from face detection info
7371 *
7372 * PARAMETERS :
7373 * @landmark_data : input landmark data to be converted
7374 * @landmarks : int32_t destination array
7375 *
7376 *
7377 *==========================================================================*/
7378void QCamera3HardwareInterface::convertLandmarks(
7379 cam_face_landmarks_info_t landmark_data,
7380 int32_t *landmarks)
7381{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007382 if (landmark_data.is_left_eye_valid) {
7383 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7384 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7385 } else {
7386 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7387 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7388 }
7389
7390 if (landmark_data.is_right_eye_valid) {
7391 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7392 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7393 } else {
7394 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7395 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7396 }
7397
7398 if (landmark_data.is_mouth_valid) {
7399 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7400 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7401 } else {
7402 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7403 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7404 }
7405}
7406
7407/*===========================================================================
7408 * FUNCTION : setInvalidLandmarks
7409 *
7410 * DESCRIPTION: helper method to set invalid landmarks
7411 *
7412 * PARAMETERS :
7413 * @landmarks : int32_t destination array
7414 *
7415 *
7416 *==========================================================================*/
7417void QCamera3HardwareInterface::setInvalidLandmarks(
7418 int32_t *landmarks)
7419{
7420 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7421 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7422 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7423 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7424 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7425 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007426}
7427
7428#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007429
7430/*===========================================================================
7431 * FUNCTION : getCapabilities
7432 *
7433 * DESCRIPTION: query camera capability from back-end
7434 *
7435 * PARAMETERS :
7436 * @ops : mm-interface ops structure
7437 * @cam_handle : camera handle for which we need capability
7438 *
7439 * RETURN : ptr type of capability structure
7440 * capability for success
7441 * NULL for failure
7442 *==========================================================================*/
7443cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
7444 uint32_t cam_handle)
7445{
7446 int rc = NO_ERROR;
7447 QCamera3HeapMemory *capabilityHeap = NULL;
7448 cam_capability_t *cap_ptr = NULL;
7449
7450 if (ops == NULL) {
7451 LOGE("Invalid arguments");
7452 return NULL;
7453 }
7454
7455 capabilityHeap = new QCamera3HeapMemory(1);
7456 if (capabilityHeap == NULL) {
7457 LOGE("creation of capabilityHeap failed");
7458 return NULL;
7459 }
7460
7461 /* Allocate memory for capability buffer */
7462 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
7463 if(rc != OK) {
7464 LOGE("No memory for cappability");
7465 goto allocate_failed;
7466 }
7467
7468 /* Map memory for capability buffer */
7469 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
7470
7471 rc = ops->map_buf(cam_handle,
7472 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
7473 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
7474 if(rc < 0) {
7475 LOGE("failed to map capability buffer");
7476 rc = FAILED_TRANSACTION;
7477 goto map_failed;
7478 }
7479
7480 /* Query Capability */
7481 rc = ops->query_capability(cam_handle);
7482 if(rc < 0) {
7483 LOGE("failed to query capability");
7484 rc = FAILED_TRANSACTION;
7485 goto query_failed;
7486 }
7487
7488 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
7489 if (cap_ptr == NULL) {
7490 LOGE("out of memory");
7491 rc = NO_MEMORY;
7492 goto query_failed;
7493 }
7494
7495 memset(cap_ptr, 0, sizeof(cam_capability_t));
7496 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
7497
7498 int index;
7499 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
7500 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
7501 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
7502 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
7503 }
7504
7505query_failed:
7506 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
7507map_failed:
7508 capabilityHeap->deallocate();
7509allocate_failed:
7510 delete capabilityHeap;
7511
7512 if (rc != NO_ERROR) {
7513 return NULL;
7514 } else {
7515 return cap_ptr;
7516 }
7517}
7518
Thierry Strudel3d639192016-09-09 11:52:26 -07007519/*===========================================================================
7520 * FUNCTION : initCapabilities
7521 *
7522 * DESCRIPTION: initialize camera capabilities in static data struct
7523 *
7524 * PARAMETERS :
7525 * @cameraId : camera Id
7526 *
7527 * RETURN : int32_t type of status
7528 * NO_ERROR -- success
7529 * none-zero failure code
7530 *==========================================================================*/
7531int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7532{
7533 int rc = 0;
7534 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007535 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007536
7537 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7538 if (rc) {
7539 LOGE("camera_open failed. rc = %d", rc);
7540 goto open_failed;
7541 }
7542 if (!cameraHandle) {
7543 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7544 goto open_failed;
7545 }
7546
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007547 handle = get_main_camera_handle(cameraHandle->camera_handle);
7548 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7549 if (gCamCapability[cameraId] == NULL) {
7550 rc = FAILED_TRANSACTION;
7551 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007552 }
7553
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007554 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007555 if (is_dual_camera_by_idx(cameraId)) {
7556 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7557 gCamCapability[cameraId]->aux_cam_cap =
7558 getCapabilities(cameraHandle->ops, handle);
7559 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7560 rc = FAILED_TRANSACTION;
7561 free(gCamCapability[cameraId]);
7562 goto failed_op;
7563 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007564
7565 // Copy the main camera capability to main_cam_cap struct
7566 gCamCapability[cameraId]->main_cam_cap =
7567 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7568 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7569 LOGE("out of memory");
7570 rc = NO_MEMORY;
7571 goto failed_op;
7572 }
7573 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7574 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007575 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007576failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007577 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7578 cameraHandle = NULL;
7579open_failed:
7580 return rc;
7581}
7582
7583/*==========================================================================
 * FUNCTION   : get3AVersion
7585 *
7586 * DESCRIPTION: get the Q3A S/W version
7587 *
7588 * PARAMETERS :
7589 * @sw_version: Reference of Q3A structure which will hold version info upon
7590 * return
7591 *
7592 * RETURN : None
7593 *
7594 *==========================================================================*/
7595void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7596{
7597 if(gCamCapability[mCameraId])
7598 sw_version = gCamCapability[mCameraId]->q3a_version;
7599 else
7600 LOGE("Capability structure NULL!");
7601}
7602
7603
7604/*===========================================================================
7605 * FUNCTION : initParameters
7606 *
7607 * DESCRIPTION: initialize camera parameters
7608 *
7609 * PARAMETERS :
7610 *
7611 * RETURN : int32_t type of status
7612 * NO_ERROR -- success
7613 * none-zero failure code
7614 *==========================================================================*/
7615int QCamera3HardwareInterface::initParameters()
7616{
7617 int rc = 0;
7618
7619 //Allocate Set Param Buffer
7620 mParamHeap = new QCamera3HeapMemory(1);
7621 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7622 if(rc != OK) {
7623 rc = NO_MEMORY;
7624 LOGE("Failed to allocate SETPARM Heap memory");
7625 delete mParamHeap;
7626 mParamHeap = NULL;
7627 return rc;
7628 }
7629
7630 //Map memory for parameters buffer
7631 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7632 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7633 mParamHeap->getFd(0),
7634 sizeof(metadata_buffer_t),
7635 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7636 if(rc < 0) {
7637 LOGE("failed to map SETPARM buffer");
7638 rc = FAILED_TRANSACTION;
7639 mParamHeap->deallocate();
7640 delete mParamHeap;
7641 mParamHeap = NULL;
7642 return rc;
7643 }
7644
7645 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7646
7647 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7648 return rc;
7649}
7650
7651/*===========================================================================
7652 * FUNCTION : deinitParameters
7653 *
7654 * DESCRIPTION: de-initialize camera parameters
7655 *
7656 * PARAMETERS :
7657 *
7658 * RETURN : NONE
7659 *==========================================================================*/
// Tears down what initParameters() set up. Order matters: the buffer must be
// unmapped from the backend before the heap backing it is deallocated.
// NOTE(review): assumes initParameters() succeeded (mParamHeap non-NULL) —
// confirm callers never invoke this after a failed init.
void QCamera3HardwareInterface::deinitParameters()
{
    // Unmap the parameter buffer from the camera backend first.
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_PARM_BUF);

    // Then release the heap that backed it.
    mParamHeap->deallocate();
    delete mParamHeap;
    mParamHeap = NULL;

    // mParameters pointed into mParamHeap's storage; just clear the alias.
    mParameters = NULL;

    // mPrevParameters was a separate malloc'd copy.
    free(mPrevParameters);
    mPrevParameters = NULL;
}
7674
7675/*===========================================================================
7676 * FUNCTION : calcMaxJpegSize
7677 *
7678 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7679 *
7680 * PARAMETERS :
7681 *
7682 * RETURN : max_jpeg_size
7683 *==========================================================================*/
7684size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7685{
7686 size_t max_jpeg_size = 0;
7687 size_t temp_width, temp_height;
7688 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7689 MAX_SIZES_CNT);
7690 for (size_t i = 0; i < count; i++) {
7691 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7692 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7693 if (temp_width * temp_height > max_jpeg_size ) {
7694 max_jpeg_size = temp_width * temp_height;
7695 }
7696 }
7697 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7698 return max_jpeg_size;
7699}
7700
7701/*===========================================================================
7702 * FUNCTION : getMaxRawSize
7703 *
7704 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7705 *
7706 * PARAMETERS :
7707 *
7708 * RETURN : Largest supported Raw Dimension
7709 *==========================================================================*/
7710cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7711{
7712 int max_width = 0;
7713 cam_dimension_t maxRawSize;
7714
7715 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7716 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7717 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7718 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7719 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7720 }
7721 }
7722 return maxRawSize;
7723}
7724
7725
7726/*===========================================================================
7727 * FUNCTION : calcMaxJpegDim
7728 *
7729 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7730 *
7731 * PARAMETERS :
7732 *
7733 * RETURN : max_jpeg_dim
7734 *==========================================================================*/
7735cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7736{
7737 cam_dimension_t max_jpeg_dim;
7738 cam_dimension_t curr_jpeg_dim;
7739 max_jpeg_dim.width = 0;
7740 max_jpeg_dim.height = 0;
7741 curr_jpeg_dim.width = 0;
7742 curr_jpeg_dim.height = 0;
7743 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7744 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7745 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
7746 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
7747 max_jpeg_dim.width * max_jpeg_dim.height ) {
7748 max_jpeg_dim.width = curr_jpeg_dim.width;
7749 max_jpeg_dim.height = curr_jpeg_dim.height;
7750 }
7751 }
7752 return max_jpeg_dim;
7753}
7754
7755/*===========================================================================
7756 * FUNCTION : addStreamConfig
7757 *
7758 * DESCRIPTION: adds the stream configuration to the array
7759 *
7760 * PARAMETERS :
7761 * @available_stream_configs : pointer to stream configuration array
7762 * @scalar_format : scalar format
7763 * @dim : configuration dimension
7764 * @config_type : input or output configuration type
7765 *
7766 * RETURN : NONE
7767 *==========================================================================*/
7768void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
7769 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
7770{
7771 available_stream_configs.add(scalar_format);
7772 available_stream_configs.add(dim.width);
7773 available_stream_configs.add(dim.height);
7774 available_stream_configs.add(config_type);
7775}
7776
7777/*===========================================================================
 * FUNCTION   : supportBurstCapture
7779 *
7780 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
7781 *
7782 * PARAMETERS :
7783 * @cameraId : camera Id
7784 *
7785 * RETURN : true if camera supports BURST_CAPTURE
7786 * false otherwise
7787 *==========================================================================*/
7788bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
7789{
7790 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
7791 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
7792 const int32_t highResWidth = 3264;
7793 const int32_t highResHeight = 2448;
7794
7795 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
7796 // Maximum resolution images cannot be captured at >= 10fps
7797 // -> not supporting BURST_CAPTURE
7798 return false;
7799 }
7800
7801 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
7802 // Maximum resolution images can be captured at >= 20fps
7803 // --> supporting BURST_CAPTURE
7804 return true;
7805 }
7806
7807 // Find the smallest highRes resolution, or largest resolution if there is none
7808 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
7809 MAX_SIZES_CNT);
7810 size_t highRes = 0;
7811 while ((highRes + 1 < totalCnt) &&
7812 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
7813 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
7814 highResWidth * highResHeight)) {
7815 highRes++;
7816 }
7817 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
7818 return true;
7819 } else {
7820 return false;
7821 }
7822}
7823
7824/*===========================================================================
7825 * FUNCTION : initStaticMetadata
7826 *
7827 * DESCRIPTION: initialize the static metadata
7828 *
7829 * PARAMETERS :
7830 * @cameraId : camera Id
7831 *
7832 * RETURN : int32_t type of status
7833 * 0 -- success
7834 * non-zero failure code
7835 *==========================================================================*/
7836int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
7837{
7838 int rc = 0;
7839 CameraMetadata staticInfo;
7840 size_t count = 0;
7841 bool limitedDevice = false;
7842 char prop[PROPERTY_VALUE_MAX];
7843 bool supportBurst = false;
7844
7845 supportBurst = supportBurstCapture(cameraId);
7846
7847 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
7848 * guaranteed or if min fps of max resolution is less than 20 fps, its
7849 * advertised as limited device*/
7850 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
7851 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
7852 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
7853 !supportBurst;
7854
7855 uint8_t supportedHwLvl = limitedDevice ?
7856 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007857#ifndef USE_HAL_3_3
7858 // LEVEL_3 - This device will support level 3.
7859 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
7860#else
Thierry Strudel3d639192016-09-09 11:52:26 -07007861 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007862#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007863
7864 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7865 &supportedHwLvl, 1);
7866
7867 bool facingBack = false;
7868 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
7869 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
7870 facingBack = true;
7871 }
7872 /*HAL 3 only*/
7873 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7874 &gCamCapability[cameraId]->min_focus_distance, 1);
7875
7876 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
7877 &gCamCapability[cameraId]->hyper_focal_distance, 1);
7878
7879 /*should be using focal lengths but sensor doesn't provide that info now*/
7880 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7881 &gCamCapability[cameraId]->focal_length,
7882 1);
7883
7884 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7885 gCamCapability[cameraId]->apertures,
7886 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
7887
7888 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7889 gCamCapability[cameraId]->filter_densities,
7890 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
7891
7892
7893 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7894 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
7895 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
7896
7897 int32_t lens_shading_map_size[] = {
7898 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
7899 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
7900 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
7901 lens_shading_map_size,
7902 sizeof(lens_shading_map_size)/sizeof(int32_t));
7903
7904 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
7905 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
7906
7907 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
7908 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
7909
7910 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7911 &gCamCapability[cameraId]->max_frame_duration, 1);
7912
7913 camera_metadata_rational baseGainFactor = {
7914 gCamCapability[cameraId]->base_gain_factor.numerator,
7915 gCamCapability[cameraId]->base_gain_factor.denominator};
7916 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
7917 &baseGainFactor, 1);
7918
7919 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7920 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
7921
7922 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
7923 gCamCapability[cameraId]->pixel_array_size.height};
7924 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7925 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
7926
7927 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
7928 gCamCapability[cameraId]->active_array_size.top,
7929 gCamCapability[cameraId]->active_array_size.width,
7930 gCamCapability[cameraId]->active_array_size.height};
7931 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7932 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
7933
7934 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
7935 &gCamCapability[cameraId]->white_level, 1);
7936
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007937 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
7938 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
7939 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007940 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007941 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07007942
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007943#ifndef USE_HAL_3_3
7944 bool hasBlackRegions = false;
7945 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
7946 LOGW("black_region_count: %d is bounded to %d",
7947 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
7948 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
7949 }
7950 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
7951 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
7952 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
7953 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
7954 }
7955 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
7956 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
7957 hasBlackRegions = true;
7958 }
7959#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007960 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
7961 &gCamCapability[cameraId]->flash_charge_duration, 1);
7962
7963 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
7964 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
7965
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007966    // SOF timestamp is based on monotonic_boottime. So advertise REALTIME time source
7967 // REALTIME defined in HAL3 API is same as linux's CLOCK_BOOTTIME
7968 // Ref: kernel/...../msm_isp_util.c: msm_isp_get_timestamp: get_monotonic_boottime
7969 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
Thierry Strudel3d639192016-09-09 11:52:26 -07007970 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7971 &timestampSource, 1);
7972
7973 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7974 &gCamCapability[cameraId]->histogram_size, 1);
7975
7976 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7977 &gCamCapability[cameraId]->max_histogram_count, 1);
7978
7979 int32_t sharpness_map_size[] = {
7980 gCamCapability[cameraId]->sharpness_map_size.width,
7981 gCamCapability[cameraId]->sharpness_map_size.height};
7982
7983 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
7984 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
7985
7986 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7987 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
7988
7989 int32_t scalar_formats[] = {
7990 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
7991 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
7992 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
7993 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
7994 HAL_PIXEL_FORMAT_RAW10,
7995 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
7996 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
7997 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
7998 scalar_formats,
7999 scalar_formats_count);
8000
8001 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8002 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8003 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8004 count, MAX_SIZES_CNT, available_processed_sizes);
8005 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8006 available_processed_sizes, count * 2);
8007
8008 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8009 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8010 makeTable(gCamCapability[cameraId]->raw_dim,
8011 count, MAX_SIZES_CNT, available_raw_sizes);
8012 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8013 available_raw_sizes, count * 2);
8014
8015 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8016 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8017 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8018 count, MAX_SIZES_CNT, available_fps_ranges);
8019 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8020 available_fps_ranges, count * 2);
8021
8022 camera_metadata_rational exposureCompensationStep = {
8023 gCamCapability[cameraId]->exp_compensation_step.numerator,
8024 gCamCapability[cameraId]->exp_compensation_step.denominator};
8025 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8026 &exposureCompensationStep, 1);
8027
8028 Vector<uint8_t> availableVstabModes;
8029 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8030 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008031 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008032 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008033 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008034 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008035 count = IS_TYPE_MAX;
8036 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8037 for (size_t i = 0; i < count; i++) {
8038 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8039 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8040 eisSupported = true;
8041 break;
8042 }
8043 }
8044 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008045 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8046 }
8047 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8048 availableVstabModes.array(), availableVstabModes.size());
8049
8050 /*HAL 1 and HAL 3 common*/
8051 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8052 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8053 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8054 float maxZoom = maxZoomStep/minZoomStep;
8055 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8056 &maxZoom, 1);
8057
8058 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8059 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8060
8061 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8062 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8063 max3aRegions[2] = 0; /* AF not supported */
8064 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8065 max3aRegions, 3);
8066
8067 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8068 memset(prop, 0, sizeof(prop));
8069 property_get("persist.camera.facedetect", prop, "1");
8070 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8071 LOGD("Support face detection mode: %d",
8072 supportedFaceDetectMode);
8073
8074 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008075 /* support mode should be OFF if max number of face is 0 */
8076 if (maxFaces <= 0) {
8077 supportedFaceDetectMode = 0;
8078 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008079 Vector<uint8_t> availableFaceDetectModes;
8080 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8081 if (supportedFaceDetectMode == 1) {
8082 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8083 } else if (supportedFaceDetectMode == 2) {
8084 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8085 } else if (supportedFaceDetectMode == 3) {
8086 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8087 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8088 } else {
8089 maxFaces = 0;
8090 }
8091 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8092 availableFaceDetectModes.array(),
8093 availableFaceDetectModes.size());
8094 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8095 (int32_t *)&maxFaces, 1);
8096
8097 int32_t exposureCompensationRange[] = {
8098 gCamCapability[cameraId]->exposure_compensation_min,
8099 gCamCapability[cameraId]->exposure_compensation_max};
8100 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8101 exposureCompensationRange,
8102 sizeof(exposureCompensationRange)/sizeof(int32_t));
8103
8104 uint8_t lensFacing = (facingBack) ?
8105 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8106 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8107
8108 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8109 available_thumbnail_sizes,
8110 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8111
8112 /*all sizes will be clubbed into this tag*/
8113 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8114 /*android.scaler.availableStreamConfigurations*/
8115 Vector<int32_t> available_stream_configs;
8116 cam_dimension_t active_array_dim;
8117 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8118 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8119 /* Add input/output stream configurations for each scalar formats*/
8120 for (size_t j = 0; j < scalar_formats_count; j++) {
8121 switch (scalar_formats[j]) {
8122 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8123 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8124 case HAL_PIXEL_FORMAT_RAW10:
8125 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8126 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8127 addStreamConfig(available_stream_configs, scalar_formats[j],
8128 gCamCapability[cameraId]->raw_dim[i],
8129 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8130 }
8131 break;
8132 case HAL_PIXEL_FORMAT_BLOB:
8133 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8134 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8135 addStreamConfig(available_stream_configs, scalar_formats[j],
8136 gCamCapability[cameraId]->picture_sizes_tbl[i],
8137 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8138 }
8139 break;
8140 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8141 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8142 default:
8143 cam_dimension_t largest_picture_size;
8144 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8145 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8146 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8147 addStreamConfig(available_stream_configs, scalar_formats[j],
8148 gCamCapability[cameraId]->picture_sizes_tbl[i],
8149 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8150 /* Book keep largest */
8151 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8152 >= largest_picture_size.width &&
8153 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8154 >= largest_picture_size.height)
8155 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8156 }
 8157            /* For the below 2 formats we also support input streams for reprocessing; advertise those */
8158 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8159 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8160 addStreamConfig(available_stream_configs, scalar_formats[j],
8161 largest_picture_size,
8162 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8163 }
8164 break;
8165 }
8166 }
8167
8168 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8169 available_stream_configs.array(), available_stream_configs.size());
8170 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8171 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8172
8173 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8174 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8175
8176 /* android.scaler.availableMinFrameDurations */
8177 Vector<int64_t> available_min_durations;
8178 for (size_t j = 0; j < scalar_formats_count; j++) {
8179 switch (scalar_formats[j]) {
8180 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8181 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8182 case HAL_PIXEL_FORMAT_RAW10:
8183 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8184 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8185 available_min_durations.add(scalar_formats[j]);
8186 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8187 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8188 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8189 }
8190 break;
8191 default:
8192 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8193 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8194 available_min_durations.add(scalar_formats[j]);
8195 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8196 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8197 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8198 }
8199 break;
8200 }
8201 }
8202 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8203 available_min_durations.array(), available_min_durations.size());
8204
8205 Vector<int32_t> available_hfr_configs;
8206 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8207 int32_t fps = 0;
8208 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8209 case CAM_HFR_MODE_60FPS:
8210 fps = 60;
8211 break;
8212 case CAM_HFR_MODE_90FPS:
8213 fps = 90;
8214 break;
8215 case CAM_HFR_MODE_120FPS:
8216 fps = 120;
8217 break;
8218 case CAM_HFR_MODE_150FPS:
8219 fps = 150;
8220 break;
8221 case CAM_HFR_MODE_180FPS:
8222 fps = 180;
8223 break;
8224 case CAM_HFR_MODE_210FPS:
8225 fps = 210;
8226 break;
8227 case CAM_HFR_MODE_240FPS:
8228 fps = 240;
8229 break;
8230 case CAM_HFR_MODE_480FPS:
8231 fps = 480;
8232 break;
8233 case CAM_HFR_MODE_OFF:
8234 case CAM_HFR_MODE_MAX:
8235 default:
8236 break;
8237 }
8238
8239 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8240 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8241 /* For each HFR frame rate, need to advertise one variable fps range
8242 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8243 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8244 * set by the app. When video recording is started, [120, 120] is
8245 * set. This way sensor configuration does not change when recording
8246 * is started */
8247
8248 /* (width, height, fps_min, fps_max, batch_size_max) */
8249 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8250 j < MAX_SIZES_CNT; j++) {
8251 available_hfr_configs.add(
8252 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8253 available_hfr_configs.add(
8254 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8255 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8256 available_hfr_configs.add(fps);
8257 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8258
8259 /* (width, height, fps_min, fps_max, batch_size_max) */
8260 available_hfr_configs.add(
8261 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8262 available_hfr_configs.add(
8263 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8264 available_hfr_configs.add(fps);
8265 available_hfr_configs.add(fps);
8266 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8267 }
8268 }
8269 }
8270 //Advertise HFR capability only if the property is set
8271 memset(prop, 0, sizeof(prop));
8272 property_get("persist.camera.hal3hfr.enable", prop, "1");
8273 uint8_t hfrEnable = (uint8_t)atoi(prop);
8274
8275 if(hfrEnable && available_hfr_configs.array()) {
8276 staticInfo.update(
8277 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8278 available_hfr_configs.array(), available_hfr_configs.size());
8279 }
8280
8281 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8282 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8283 &max_jpeg_size, 1);
8284
8285 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8286 size_t size = 0;
8287 count = CAM_EFFECT_MODE_MAX;
8288 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8289 for (size_t i = 0; i < count; i++) {
8290 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8291 gCamCapability[cameraId]->supported_effects[i]);
8292 if (NAME_NOT_FOUND != val) {
8293 avail_effects[size] = (uint8_t)val;
8294 size++;
8295 }
8296 }
8297 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8298 avail_effects,
8299 size);
8300
8301 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8302 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8303 size_t supported_scene_modes_cnt = 0;
8304 count = CAM_SCENE_MODE_MAX;
8305 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8306 for (size_t i = 0; i < count; i++) {
8307 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8308 CAM_SCENE_MODE_OFF) {
8309 int val = lookupFwkName(SCENE_MODES_MAP,
8310 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8311 gCamCapability[cameraId]->supported_scene_modes[i]);
8312 if (NAME_NOT_FOUND != val) {
8313 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8314 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8315 supported_scene_modes_cnt++;
8316 }
8317 }
8318 }
8319 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8320 avail_scene_modes,
8321 supported_scene_modes_cnt);
8322
8323 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8324 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8325 supported_scene_modes_cnt,
8326 CAM_SCENE_MODE_MAX,
8327 scene_mode_overrides,
8328 supported_indexes,
8329 cameraId);
8330
8331 if (supported_scene_modes_cnt == 0) {
8332 supported_scene_modes_cnt = 1;
8333 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8334 }
8335
8336 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8337 scene_mode_overrides, supported_scene_modes_cnt * 3);
8338
8339 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8340 ANDROID_CONTROL_MODE_AUTO,
8341 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8342 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8343 available_control_modes,
8344 3);
8345
8346 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8347 size = 0;
8348 count = CAM_ANTIBANDING_MODE_MAX;
8349 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8350 for (size_t i = 0; i < count; i++) {
8351 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8352 gCamCapability[cameraId]->supported_antibandings[i]);
8353 if (NAME_NOT_FOUND != val) {
8354 avail_antibanding_modes[size] = (uint8_t)val;
8355 size++;
8356 }
8357
8358 }
8359 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8360 avail_antibanding_modes,
8361 size);
8362
8363 uint8_t avail_abberation_modes[] = {
8364 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8365 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8366 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8367 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8368 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8369 if (0 == count) {
 8370        // If no aberration correction modes are available for a device, advertise only the OFF mode
8371 size = 1;
8372 } else {
 8373        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported
 8374        // So, advertise all 3 modes if at least one mode is supported, as per the
 8375        // new M requirement
8376 size = 3;
8377 }
8378 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8379 avail_abberation_modes,
8380 size);
8381
8382 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8383 size = 0;
8384 count = CAM_FOCUS_MODE_MAX;
8385 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8386 for (size_t i = 0; i < count; i++) {
8387 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8388 gCamCapability[cameraId]->supported_focus_modes[i]);
8389 if (NAME_NOT_FOUND != val) {
8390 avail_af_modes[size] = (uint8_t)val;
8391 size++;
8392 }
8393 }
8394 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8395 avail_af_modes,
8396 size);
8397
8398 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8399 size = 0;
8400 count = CAM_WB_MODE_MAX;
8401 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8402 for (size_t i = 0; i < count; i++) {
8403 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8404 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8405 gCamCapability[cameraId]->supported_white_balances[i]);
8406 if (NAME_NOT_FOUND != val) {
8407 avail_awb_modes[size] = (uint8_t)val;
8408 size++;
8409 }
8410 }
8411 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8412 avail_awb_modes,
8413 size);
8414
8415 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8416 count = CAM_FLASH_FIRING_LEVEL_MAX;
8417 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8418 count);
8419 for (size_t i = 0; i < count; i++) {
8420 available_flash_levels[i] =
8421 gCamCapability[cameraId]->supported_firing_levels[i];
8422 }
8423 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8424 available_flash_levels, count);
8425
8426 uint8_t flashAvailable;
8427 if (gCamCapability[cameraId]->flash_available)
8428 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8429 else
8430 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8431 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8432 &flashAvailable, 1);
8433
8434 Vector<uint8_t> avail_ae_modes;
8435 count = CAM_AE_MODE_MAX;
8436 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8437 for (size_t i = 0; i < count; i++) {
8438 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8439 }
8440 if (flashAvailable) {
8441 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8442 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8443 }
8444 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8445 avail_ae_modes.array(),
8446 avail_ae_modes.size());
8447
8448 int32_t sensitivity_range[2];
8449 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8450 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8451 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8452 sensitivity_range,
8453 sizeof(sensitivity_range) / sizeof(int32_t));
8454
8455 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8456 &gCamCapability[cameraId]->max_analog_sensitivity,
8457 1);
8458
8459 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8460 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8461 &sensor_orientation,
8462 1);
8463
8464 int32_t max_output_streams[] = {
8465 MAX_STALLING_STREAMS,
8466 MAX_PROCESSED_STREAMS,
8467 MAX_RAW_STREAMS};
8468 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8469 max_output_streams,
8470 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8471
8472 uint8_t avail_leds = 0;
8473 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8474 &avail_leds, 0);
8475
8476 uint8_t focus_dist_calibrated;
8477 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8478 gCamCapability[cameraId]->focus_dist_calibrated);
8479 if (NAME_NOT_FOUND != val) {
8480 focus_dist_calibrated = (uint8_t)val;
8481 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8482 &focus_dist_calibrated, 1);
8483 }
8484
8485 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8486 size = 0;
8487 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8488 MAX_TEST_PATTERN_CNT);
8489 for (size_t i = 0; i < count; i++) {
8490 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8491 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8492 if (NAME_NOT_FOUND != testpatternMode) {
8493 avail_testpattern_modes[size] = testpatternMode;
8494 size++;
8495 }
8496 }
8497 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8498 avail_testpattern_modes,
8499 size);
8500
8501 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8502 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8503 &max_pipeline_depth,
8504 1);
8505
8506 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8507 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8508 &partial_result_count,
8509 1);
8510
8511 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8512 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8513
8514 Vector<uint8_t> available_capabilities;
8515 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8516 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8517 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8518 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8519 if (supportBurst) {
8520 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8521 }
8522 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8523 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8524 if (hfrEnable && available_hfr_configs.array()) {
8525 available_capabilities.add(
8526 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8527 }
8528
8529 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8530 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8531 }
8532 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8533 available_capabilities.array(),
8534 available_capabilities.size());
8535
8536 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
8537 //Assumption is that all bayer cameras support MANUAL_SENSOR.
8538 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8539 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8540
8541 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8542 &aeLockAvailable, 1);
8543
8544 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
8545 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8546 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8547 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8548
8549 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8550 &awbLockAvailable, 1);
8551
8552 int32_t max_input_streams = 1;
8553 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8554 &max_input_streams,
8555 1);
8556
8557 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8558 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8559 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8560 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8561 HAL_PIXEL_FORMAT_YCbCr_420_888};
8562 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8563 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8564
8565 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8566 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8567 &max_latency,
8568 1);
8569
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008570#ifndef USE_HAL_3_3
8571 int32_t isp_sensitivity_range[2];
8572 isp_sensitivity_range[0] =
8573 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8574 isp_sensitivity_range[1] =
8575 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8576 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8577 isp_sensitivity_range,
8578 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8579#endif
8580
Thierry Strudel3d639192016-09-09 11:52:26 -07008581 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8582 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8583 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8584 available_hot_pixel_modes,
8585 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8586
8587 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8588 ANDROID_SHADING_MODE_FAST,
8589 ANDROID_SHADING_MODE_HIGH_QUALITY};
8590 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8591 available_shading_modes,
8592 3);
8593
8594 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8595 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8596 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8597 available_lens_shading_map_modes,
8598 2);
8599
8600 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8601 ANDROID_EDGE_MODE_FAST,
8602 ANDROID_EDGE_MODE_HIGH_QUALITY,
8603 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8604 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8605 available_edge_modes,
8606 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8607
8608 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8609 ANDROID_NOISE_REDUCTION_MODE_FAST,
8610 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8611 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8612 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8613 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8614 available_noise_red_modes,
8615 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8616
8617 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8618 ANDROID_TONEMAP_MODE_FAST,
8619 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8620 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8621 available_tonemap_modes,
8622 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8623
8624 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8625 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8626 available_hot_pixel_map_modes,
8627 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8628
8629 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8630 gCamCapability[cameraId]->reference_illuminant1);
8631 if (NAME_NOT_FOUND != val) {
8632 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8633 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8634 }
8635
8636 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8637 gCamCapability[cameraId]->reference_illuminant2);
8638 if (NAME_NOT_FOUND != val) {
8639 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8640 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8641 }
8642
8643 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8644 (void *)gCamCapability[cameraId]->forward_matrix1,
8645 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8646
8647 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8648 (void *)gCamCapability[cameraId]->forward_matrix2,
8649 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8650
8651 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8652 (void *)gCamCapability[cameraId]->color_transform1,
8653 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8654
8655 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8656 (void *)gCamCapability[cameraId]->color_transform2,
8657 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8658
8659 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8660 (void *)gCamCapability[cameraId]->calibration_transform1,
8661 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8662
8663 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8664 (void *)gCamCapability[cameraId]->calibration_transform2,
8665 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8666
8667 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8668 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8669 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8670 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8671 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8672 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8673 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8674 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8675 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8676 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8677 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8678 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8679 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8680 ANDROID_JPEG_GPS_COORDINATES,
8681 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8682 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8683 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8684 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8685 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8686 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8687 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8688 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8689 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8690 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008691#ifndef USE_HAL_3_3
8692 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8693#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008694 ANDROID_STATISTICS_FACE_DETECT_MODE,
8695 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8696 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8697 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008698 ANDROID_BLACK_LEVEL_LOCK,
8699 /* DevCamDebug metadata request_keys_basic */
8700 DEVCAMDEBUG_META_ENABLE,
8701 /* DevCamDebug metadata end */
8702 };
Thierry Strudel3d639192016-09-09 11:52:26 -07008703
8704 size_t request_keys_cnt =
8705 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8706 Vector<int32_t> available_request_keys;
8707 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8708 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8709 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8710 }
8711
8712 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8713 available_request_keys.array(), available_request_keys.size());
8714
8715 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8716 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8717 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8718 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8719 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8720 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8721 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8722 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8723 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8724 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8725 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8726 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8727 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8728 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8729 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8730 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8731 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8732 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8733 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8734 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8735 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008736 ANDROID_STATISTICS_FACE_SCORES,
8737#ifndef USE_HAL_3_3
8738 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8739#endif
Shuzhen Wange763e802016-03-31 10:24:29 -07008740 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08008741 // DevCamDebug metadata result_keys_basic
8742 DEVCAMDEBUG_META_ENABLE,
8743 // DevCamDebug metadata result_keys AF
8744 DEVCAMDEBUG_AF_LENS_POSITION,
8745 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
8746 DEVCAMDEBUG_AF_TOF_DISTANCE,
8747 DEVCAMDEBUG_AF_LUMA,
8748 DEVCAMDEBUG_AF_HAF_STATE,
8749 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
8750 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
8751 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
8752 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
8753 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
8754 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
8755 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
8756 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
8757 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
8758 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
8759 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
8760 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
8761 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
8762 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
8763 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
8764 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
8765 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
8766 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
8767 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
8768 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
8769 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
8770 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
8771 // DevCamDebug metadata result_keys AEC
8772 DEVCAMDEBUG_AEC_TARGET_LUMA,
8773 DEVCAMDEBUG_AEC_COMP_LUMA,
8774 DEVCAMDEBUG_AEC_AVG_LUMA,
8775 DEVCAMDEBUG_AEC_CUR_LUMA,
8776 DEVCAMDEBUG_AEC_LINECOUNT,
8777 DEVCAMDEBUG_AEC_REAL_GAIN,
8778 DEVCAMDEBUG_AEC_EXP_INDEX,
8779 DEVCAMDEBUG_AEC_LUX_IDX,
8780 // DevCamDebug metadata result_keys AWB
8781 DEVCAMDEBUG_AWB_R_GAIN,
8782 DEVCAMDEBUG_AWB_G_GAIN,
8783 DEVCAMDEBUG_AWB_B_GAIN,
8784 DEVCAMDEBUG_AWB_CCT,
8785 DEVCAMDEBUG_AWB_DECISION,
8786 /* DevCamDebug metadata end */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008787 };
8788
Thierry Strudel3d639192016-09-09 11:52:26 -07008789 size_t result_keys_cnt =
8790 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
8791
8792 Vector<int32_t> available_result_keys;
8793 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
8794 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8795 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
8796 }
8797 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
8798 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
8799 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
8800 }
8801 if (supportedFaceDetectMode == 1) {
8802 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
8803 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
8804 } else if ((supportedFaceDetectMode == 2) ||
8805 (supportedFaceDetectMode == 3)) {
8806 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
8807 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
8808 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008809#ifndef USE_HAL_3_3
8810 if (hasBlackRegions) {
8811 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
8812 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
8813 }
8814#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008815 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8816 available_result_keys.array(), available_result_keys.size());
8817
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008818 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07008819 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8820 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
8821 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
8822 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8823 ANDROID_SCALER_CROPPING_TYPE,
8824 ANDROID_SYNC_MAX_LATENCY,
8825 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8826 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8827 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8828 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
8829 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
8830 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8831 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8832 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8833 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8834 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8835 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8836 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8837 ANDROID_LENS_FACING,
8838 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8839 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8840 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
8841 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8842 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8843 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8844 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8845 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
8846 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
8847 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
8848 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
8849 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
8850 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8851 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8852 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8853 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8854 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
8855 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8856 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8857 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8858 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8859 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8860 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8861 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8862 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8863 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8864 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8865 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8866 ANDROID_TONEMAP_MAX_CURVE_POINTS,
8867 ANDROID_CONTROL_AVAILABLE_MODES,
8868 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8869 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8870 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8871 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008872 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8873#ifndef USE_HAL_3_3
8874 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
8875 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8876#endif
8877 };
8878
8879 Vector<int32_t> available_characteristics_keys;
8880 available_characteristics_keys.appendArray(characteristics_keys_basic,
8881 sizeof(characteristics_keys_basic)/sizeof(int32_t));
8882#ifndef USE_HAL_3_3
8883 if (hasBlackRegions) {
8884 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
8885 }
8886#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008887 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008888 available_characteristics_keys.array(),
8889 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07008890
8891 /*available stall durations depend on the hw + sw and will be different for different devices */
8892 /*have to add for raw after implementation*/
8893 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
8894 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
8895
8896 Vector<int64_t> available_stall_durations;
8897 for (uint32_t j = 0; j < stall_formats_count; j++) {
8898 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
8899 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8900 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8901 available_stall_durations.add(stall_formats[j]);
8902 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8903 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8904 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
8905 }
8906 } else {
8907 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
8908 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8909 available_stall_durations.add(stall_formats[j]);
8910 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8911 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8912 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
8913 }
8914 }
8915 }
8916 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
8917 available_stall_durations.array(),
8918 available_stall_durations.size());
8919
8920 //QCAMERA3_OPAQUE_RAW
8921 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8922 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8923 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
8924 case LEGACY_RAW:
8925 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8926 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
8927 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8928 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
8929 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8930 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
8931 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
8932 break;
8933 case MIPI_RAW:
8934 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
8935 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
8936 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
8937 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
8938 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
8939 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
8940 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
8941 break;
8942 default:
8943 LOGE("unknown opaque_raw_format %d",
8944 gCamCapability[cameraId]->opaque_raw_fmt);
8945 break;
8946 }
8947 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
8948
8949 Vector<int32_t> strides;
8950 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8951 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8952 cam_stream_buf_plane_info_t buf_planes;
8953 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
8954 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
8955 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8956 &gCamCapability[cameraId]->padding_info, &buf_planes);
8957 strides.add(buf_planes.plane_info.mp[0].stride);
8958 }
8959 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
8960 strides.size());
8961
Thierry Strudel04e026f2016-10-10 11:27:36 -07008962 //Video HDR default
8963 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
8964 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
8965 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
8966 int32_t vhdr_mode[] = {
8967 QCAMERA3_VIDEO_HDR_MODE_OFF,
8968 QCAMERA3_VIDEO_HDR_MODE_ON};
8969
8970 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
8971 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
8972 vhdr_mode, vhdr_mode_count);
8973 }
8974
Thierry Strudel3d639192016-09-09 11:52:26 -07008975 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
8976 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
8977 sizeof(gCamCapability[cameraId]->related_cam_calibration));
8978
8979 uint8_t isMonoOnly =
8980 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
8981 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
8982 &isMonoOnly, 1);
8983
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008984#ifndef USE_HAL_3_3
8985 Vector<int32_t> opaque_size;
8986 for (size_t j = 0; j < scalar_formats_count; j++) {
8987 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
8988 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8989 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8990 cam_stream_buf_plane_info_t buf_planes;
8991
8992 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
8993 &gCamCapability[cameraId]->padding_info, &buf_planes);
8994
8995 if (rc == 0) {
8996 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
8997 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
8998 opaque_size.add(buf_planes.plane_info.frame_len);
8999 }else {
9000 LOGE("raw frame calculation failed!");
9001 }
9002 }
9003 }
9004 }
9005
9006 if ((opaque_size.size() > 0) &&
9007 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9008 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9009 else
9010 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9011#endif
9012
Thierry Strudel04e026f2016-10-10 11:27:36 -07009013 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9014 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9015 size = 0;
9016 count = CAM_IR_MODE_MAX;
9017 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9018 for (size_t i = 0; i < count; i++) {
9019 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9020 gCamCapability[cameraId]->supported_ir_modes[i]);
9021 if (NAME_NOT_FOUND != val) {
9022 avail_ir_modes[size] = (int32_t)val;
9023 size++;
9024 }
9025 }
9026 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9027 avail_ir_modes, size);
9028 }
9029
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009030 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9031 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9032 size = 0;
9033 count = CAM_AEC_CONVERGENCE_MAX;
9034 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9035 for (size_t i = 0; i < count; i++) {
9036 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9037 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9038 if (NAME_NOT_FOUND != val) {
9039 available_instant_aec_modes[size] = (int32_t)val;
9040 size++;
9041 }
9042 }
9043 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9044 available_instant_aec_modes, size);
9045 }
9046
Thierry Strudel3d639192016-09-09 11:52:26 -07009047 gStaticMetadata[cameraId] = staticInfo.release();
9048 return rc;
9049}
9050
9051/*===========================================================================
9052 * FUNCTION : makeTable
9053 *
9054 * DESCRIPTION: make a table of sizes
9055 *
9056 * PARAMETERS :
9057 *
9058 *
9059 *==========================================================================*/
9060void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9061 size_t max_size, int32_t *sizeTable)
9062{
9063 size_t j = 0;
9064 if (size > max_size) {
9065 size = max_size;
9066 }
9067 for (size_t i = 0; i < size; i++) {
9068 sizeTable[j] = dimTable[i].width;
9069 sizeTable[j+1] = dimTable[i].height;
9070 j+=2;
9071 }
9072}
9073
9074/*===========================================================================
9075 * FUNCTION : makeFPSTable
9076 *
9077 * DESCRIPTION: make a table of fps ranges
9078 *
9079 * PARAMETERS :
9080 *
9081 *==========================================================================*/
9082void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9083 size_t max_size, int32_t *fpsRangesTable)
9084{
9085 size_t j = 0;
9086 if (size > max_size) {
9087 size = max_size;
9088 }
9089 for (size_t i = 0; i < size; i++) {
9090 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9091 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9092 j+=2;
9093 }
9094}
9095
9096/*===========================================================================
9097 * FUNCTION : makeOverridesList
9098 *
9099 * DESCRIPTION: make a list of scene mode overrides
9100 *
9101 * PARAMETERS :
9102 *
9103 *
9104 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
      However we should send the fwk only the overrides for the scene modes
      supported by the framework*/
    // Output layout: 3 bytes per supported scene mode — [AE, AWB, AF] —
    // matching the ANDROID_CONTROL_SCENE_MODE_OVERRIDES tag format.
    size_t j = 0;
    if (size > max_size) {
        size = max_size;
    }
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        // supported_indexes[i] maps the i-th framework-visible scene mode back
        // to its slot in the daemon-provided overrides table.
        size_t index = supported_indexes[i];
        // AE override: auto-flash when the sensor has a flash unit, plain ON
        // otherwise.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            // NOTE(review): if the AWB lookup fails, overridesList[j+1] is left
            // unwritten — presumably the caller zero-initializes the buffer;
            // TODO confirm.
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only advertise the daemon's focus mode if the sensor
        // actually supports it; otherwise fall back to AF_MODE_OFF.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
                supt = true;
                break;
            }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
9149
9150/*===========================================================================
9151 * FUNCTION : filterJpegSizes
9152 *
9153 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9154 * could be downscaled to
9155 *
9156 * PARAMETERS :
9157 *
9158 * RETURN : length of jpegSizes array
9159 *==========================================================================*/
9160
9161size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9162 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9163 uint8_t downscale_factor)
9164{
9165 if (0 == downscale_factor) {
9166 downscale_factor = 1;
9167 }
9168
9169 int32_t min_width = active_array_size.width / downscale_factor;
9170 int32_t min_height = active_array_size.height / downscale_factor;
9171 size_t jpegSizesCnt = 0;
9172 if (processedSizesCnt > maxCount) {
9173 processedSizesCnt = maxCount;
9174 }
9175 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9176 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9177 jpegSizes[jpegSizesCnt] = processedSizes[i];
9178 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9179 jpegSizesCnt += 2;
9180 }
9181 }
9182 return jpegSizesCnt;
9183}
9184
9185/*===========================================================================
9186 * FUNCTION : computeNoiseModelEntryS
9187 *
9188 * DESCRIPTION: function to map a given sensitivity to the S noise
9189 * model parameters in the DNG noise model.
9190 *
9191 * PARAMETERS : sens : the sensor sensitivity
9192 *
9193 ** RETURN : S (sensor amplification) noise
9194 *
9195 *==========================================================================*/
9196double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9197 double s = gCamCapability[mCameraId]->gradient_S * sens +
9198 gCamCapability[mCameraId]->offset_S;
9199 return ((s < 0.0) ? 0.0 : s);
9200}
9201
9202/*===========================================================================
9203 * FUNCTION : computeNoiseModelEntryO
9204 *
9205 * DESCRIPTION: function to map a given sensitivity to the O noise
9206 * model parameters in the DNG noise model.
9207 *
9208 * PARAMETERS : sens : the sensor sensitivity
9209 *
9210 ** RETURN : O (sensor readout) noise
9211 *
9212 *==========================================================================*/
9213double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9214 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9215 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9216 1.0 : (1.0 * sens / max_analog_sens);
9217 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9218 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9219 return ((o < 0.0) ? 0.0 : o);
9220}
9221
9222/*===========================================================================
9223 * FUNCTION : getSensorSensitivity
9224 *
9225 * DESCRIPTION: convert iso_mode to an integer value
9226 *
9227 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9228 *
9229 ** RETURN : sensitivity supported by sensor
9230 *
9231 *==========================================================================*/
9232int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9233{
9234 int32_t sensitivity;
9235
9236 switch (iso_mode) {
9237 case CAM_ISO_MODE_100:
9238 sensitivity = 100;
9239 break;
9240 case CAM_ISO_MODE_200:
9241 sensitivity = 200;
9242 break;
9243 case CAM_ISO_MODE_400:
9244 sensitivity = 400;
9245 break;
9246 case CAM_ISO_MODE_800:
9247 sensitivity = 800;
9248 break;
9249 case CAM_ISO_MODE_1600:
9250 sensitivity = 1600;
9251 break;
9252 default:
9253 sensitivity = -1;
9254 break;
9255 }
9256 return sensitivity;
9257}
9258
9259/*===========================================================================
9260 * FUNCTION : getCamInfo
9261 *
9262 * DESCRIPTION: query camera capabilities
9263 *
9264 * PARAMETERS :
9265 * @cameraId : camera Id
9266 * @info : camera info struct to be filled in with camera capabilities
9267 *
9268 * RETURN : int type of status
9269 * NO_ERROR -- success
9270 * none-zero failure code
9271 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock guards the lazily-initialized global capability and static
    // metadata tables; every return path below must unlock it.
    pthread_mutex_lock(&gCamLock);
    // Lazily query sensor capabilities the first time this camera is asked for.
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Lazily build the static characteristics metadata blob as well.
    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Translate the HAL's mount position (including aux sensors on dual-cam
    // setups) into the framework's front/back facing notion.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // NOTE(review): on an unknown position rc is set to -1 but the rest of
        // the info struct is still populated and rc returned at the end —
        // presumably intentional so callers get partial data; confirm.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // Advertised HAL device version depends on the build-time HAL3 flavor.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // Find the highest supported frame rate across all advertised fps ranges.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // Worst-case pixel throughput (streams * active-array pixels * fps)
    // relative to the CPP's maximum bandwidth.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9346
9347/*===========================================================================
9348 * FUNCTION : translateCapabilityToMetadata
9349 *
9350 * DESCRIPTION: translate the capability into camera_metadata_t
9351 *
9352 * PARAMETERS : type of the request
9353 *
9354 *
9355 * RETURN : success: camera_metadata_t*
9356 * failure: NULL
9357 *
9358 *==========================================================================*/
9359camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9360{
9361 if (mDefaultMetadata[type] != NULL) {
9362 return mDefaultMetadata[type];
9363 }
9364 //first time we are handling this request
9365 //fill up the metadata structure using the wrapper class
9366 CameraMetadata settings;
9367 //translate from cam_capability_t to camera_metadata_tag_t
9368 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9369 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9370 int32_t defaultRequestID = 0;
9371 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9372
9373 /* OIS disable */
9374 char ois_prop[PROPERTY_VALUE_MAX];
9375 memset(ois_prop, 0, sizeof(ois_prop));
9376 property_get("persist.camera.ois.disable", ois_prop, "0");
9377 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9378
9379 /* Force video to use OIS */
9380 char videoOisProp[PROPERTY_VALUE_MAX];
9381 memset(videoOisProp, 0, sizeof(videoOisProp));
9382 property_get("persist.camera.ois.video", videoOisProp, "1");
9383 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009384
9385 // Hybrid AE enable/disable
9386 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9387 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9388 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9389 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9390
Thierry Strudel3d639192016-09-09 11:52:26 -07009391 uint8_t controlIntent = 0;
9392 uint8_t focusMode;
9393 uint8_t vsMode;
9394 uint8_t optStabMode;
9395 uint8_t cacMode;
9396 uint8_t edge_mode;
9397 uint8_t noise_red_mode;
9398 uint8_t tonemap_mode;
9399 bool highQualityModeEntryAvailable = FALSE;
9400 bool fastModeEntryAvailable = FALSE;
9401 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9402 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9403 switch (type) {
9404 case CAMERA3_TEMPLATE_PREVIEW:
9405 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9406 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9407 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9408 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9409 edge_mode = ANDROID_EDGE_MODE_FAST;
9410 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9411 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9412 break;
9413 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9414 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9415 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9416 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9417 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9418 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9419 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9420 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9421 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9422 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9423 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9424 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9425 highQualityModeEntryAvailable = TRUE;
9426 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9427 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9428 fastModeEntryAvailable = TRUE;
9429 }
9430 }
9431 if (highQualityModeEntryAvailable) {
9432 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9433 } else if (fastModeEntryAvailable) {
9434 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9435 }
9436 break;
9437 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9438 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9439 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9440 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009441 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9442 edge_mode = ANDROID_EDGE_MODE_FAST;
9443 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9444 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9445 if (forceVideoOis)
9446 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9447 break;
9448 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9449 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9450 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9451 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009452 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9453 edge_mode = ANDROID_EDGE_MODE_FAST;
9454 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9455 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9456 if (forceVideoOis)
9457 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9458 break;
9459 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9460 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9461 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9462 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9463 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9464 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9465 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9466 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9467 break;
9468 case CAMERA3_TEMPLATE_MANUAL:
9469 edge_mode = ANDROID_EDGE_MODE_FAST;
9470 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9471 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9472 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9473 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9474 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9475 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9476 break;
9477 default:
9478 edge_mode = ANDROID_EDGE_MODE_FAST;
9479 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9480 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9481 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9482 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9483 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9484 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9485 break;
9486 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009487 // Set CAC to OFF if underlying device doesn't support
9488 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9489 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9490 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009491 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9492 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9493 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9494 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9495 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9496 }
9497 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9498
9499 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9500 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9501 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9502 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9503 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9504 || ois_disable)
9505 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9506 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9507
9508 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9509 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9510
9511 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9512 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9513
9514 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9515 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9516
9517 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9518 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9519
9520 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9521 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9522
9523 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9524 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9525
9526 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9527 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9528
9529 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9530 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9531
9532 /*flash*/
9533 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9534 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9535
9536 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9537 settings.update(ANDROID_FLASH_FIRING_POWER,
9538 &flashFiringLevel, 1);
9539
9540 /* lens */
9541 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9542 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9543
9544 if (gCamCapability[mCameraId]->filter_densities_count) {
9545 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9546 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9547 gCamCapability[mCameraId]->filter_densities_count);
9548 }
9549
9550 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9551 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9552
9553 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9554 float default_focus_distance = 0;
9555 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9556 }
9557
9558 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9559 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9560
9561 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9562 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9563
9564 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9565 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9566
9567 /* face detection (default to OFF) */
9568 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9569 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9570
9571 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9572 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9573
9574 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9575 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9576
9577 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9578 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9579
9580 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9581 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9582
9583 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9584 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9585
9586 /* Exposure time(Update the Min Exposure Time)*/
9587 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9588 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9589
9590 /* frame duration */
9591 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9592 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9593
9594 /* sensitivity */
9595 static const int32_t default_sensitivity = 100;
9596 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009597#ifndef USE_HAL_3_3
9598 static const int32_t default_isp_sensitivity =
9599 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9600 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9601#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009602
9603 /*edge mode*/
9604 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9605
9606 /*noise reduction mode*/
9607 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9608
9609 /*color correction mode*/
9610 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9611 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9612
9613 /*transform matrix mode*/
9614 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9615
9616 int32_t scaler_crop_region[4];
9617 scaler_crop_region[0] = 0;
9618 scaler_crop_region[1] = 0;
9619 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9620 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9621 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9622
9623 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9624 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9625
9626 /*focus distance*/
9627 float focus_distance = 0.0;
9628 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9629
9630 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009631 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009632 float max_range = 0.0;
9633 float max_fixed_fps = 0.0;
9634 int32_t fps_range[2] = {0, 0};
9635 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9636 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009637 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9638 TEMPLATE_MAX_PREVIEW_FPS) {
9639 continue;
9640 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009641 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9642 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9643 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9644 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9645 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9646 if (range > max_range) {
9647 fps_range[0] =
9648 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9649 fps_range[1] =
9650 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9651 max_range = range;
9652 }
9653 } else {
9654 if (range < 0.01 && max_fixed_fps <
9655 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9656 fps_range[0] =
9657 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9658 fps_range[1] =
9659 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9660 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9661 }
9662 }
9663 }
9664 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9665
9666 /*precapture trigger*/
9667 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9668 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9669
9670 /*af trigger*/
9671 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9672 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9673
9674 /* ae & af regions */
9675 int32_t active_region[] = {
9676 gCamCapability[mCameraId]->active_array_size.left,
9677 gCamCapability[mCameraId]->active_array_size.top,
9678 gCamCapability[mCameraId]->active_array_size.left +
9679 gCamCapability[mCameraId]->active_array_size.width,
9680 gCamCapability[mCameraId]->active_array_size.top +
9681 gCamCapability[mCameraId]->active_array_size.height,
9682 0};
9683 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9684 sizeof(active_region) / sizeof(active_region[0]));
9685 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9686 sizeof(active_region) / sizeof(active_region[0]));
9687
9688 /* black level lock */
9689 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9690 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9691
9692 /* lens shading map mode */
9693 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9694 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9695 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9696 }
9697 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9698
9699 //special defaults for manual template
9700 if (type == CAMERA3_TEMPLATE_MANUAL) {
9701 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9702 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9703
9704 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9705 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9706
9707 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9708 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9709
9710 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9711 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9712
9713 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9714 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9715
9716 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9717 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9718 }
9719
9720
9721 /* TNR
9722 * We'll use this location to determine which modes TNR will be set.
9723 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
9724 * This is not to be confused with linking on a per stream basis that decision
9725 * is still on per-session basis and will be handled as part of config stream
9726 */
9727 uint8_t tnr_enable = 0;
9728
9729 if (m_bTnrPreview || m_bTnrVideo) {
9730
9731 switch (type) {
9732 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9733 tnr_enable = 1;
9734 break;
9735
9736 default:
9737 tnr_enable = 0;
9738 break;
9739 }
9740
9741 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9742 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9743 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9744
9745 LOGD("TNR:%d with process plate %d for template:%d",
9746 tnr_enable, tnr_process_type, type);
9747 }
9748
9749 //Update Link tags to default
9750 int32_t sync_type = CAM_TYPE_STANDALONE;
9751 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
9752
9753 int32_t is_main = 0; //this doesn't matter as app should overwrite
9754 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
9755
9756 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
9757
9758 /* CDS default */
9759 char prop[PROPERTY_VALUE_MAX];
9760 memset(prop, 0, sizeof(prop));
9761 property_get("persist.camera.CDS", prop, "Auto");
9762 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
9763 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
9764 if (CAM_CDS_MODE_MAX == cds_mode) {
9765 cds_mode = CAM_CDS_MODE_AUTO;
9766 }
9767
9768 /* Disabling CDS in templates which have TNR enabled*/
9769 if (tnr_enable)
9770 cds_mode = CAM_CDS_MODE_OFF;
9771
9772 int32_t mode = cds_mode;
9773 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07009774
9775 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
9776 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
9777
9778 /* IR Mode Default Off */
9779 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
9780 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
9781
Thierry Strudel269c81a2016-10-12 12:13:59 -07009782 /* Manual Convergence AEC Speed is disabled by default*/
9783 float default_aec_speed = 0;
9784 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
9785
9786 /* Manual Convergence AWB Speed is disabled by default*/
9787 float default_awb_speed = 0;
9788 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
9789
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009790 // Set instant AEC to normal convergence by default
9791 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
9792 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
9793
Shuzhen Wang19463d72016-03-08 11:09:52 -08009794 /* hybrid ae */
9795 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
9796
Thierry Strudel3d639192016-09-09 11:52:26 -07009797 mDefaultMetadata[type] = settings.release();
9798
9799 return mDefaultMetadata[type];
9800}
9801
9802/*===========================================================================
9803 * FUNCTION : setFrameParameters
9804 *
9805 * DESCRIPTION: set parameters per frame as requested in the metadata from
9806 * framework
9807 *
9808 * PARAMETERS :
9809 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009810 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -07009811 * @blob_request: Whether this request is a blob request or not
9812 *
9813 * RETURN : success: NO_ERROR
9814 * failure:
9815 *==========================================================================*/
9816int QCamera3HardwareInterface::setFrameParameters(
9817 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009818 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -07009819 int blob_request,
9820 uint32_t snapshotStreamId)
9821{
9822 /*translate from camera_metadata_t type to parm_type_t*/
9823 int rc = 0;
9824 int32_t hal_version = CAM_HAL_V3;
9825
9826 clear_metadata_buffer(mParameters);
9827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
9828 LOGE("Failed to set hal version in the parameters");
9829 return BAD_VALUE;
9830 }
9831
9832 /*we need to update the frame number in the parameters*/
9833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
9834 request->frame_number)) {
9835 LOGE("Failed to set the frame number in the parameters");
9836 return BAD_VALUE;
9837 }
9838
9839 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009841 LOGE("Failed to set stream type mask in the parameters");
9842 return BAD_VALUE;
9843 }
9844
9845 if (mUpdateDebugLevel) {
9846 uint32_t dummyDebugLevel = 0;
9847 /* The value of dummyDebugLevel is irrelavent. On
9848 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
9849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
9850 dummyDebugLevel)) {
9851 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
9852 return BAD_VALUE;
9853 }
9854 mUpdateDebugLevel = false;
9855 }
9856
9857 if(request->settings != NULL){
9858 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
9859 if (blob_request)
9860 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
9861 }
9862
9863 return rc;
9864}
9865
9866/*===========================================================================
9867 * FUNCTION : setReprocParameters
9868 *
9869 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
9870 * return it.
9871 *
9872 * PARAMETERS :
9873 * @request : request that needs to be serviced
9874 *
9875 * RETURN : success: NO_ERROR
9876 * failure:
9877 *==========================================================================*/
9878int32_t QCamera3HardwareInterface::setReprocParameters(
9879 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
9880 uint32_t snapshotStreamId)
9881{
9882 /*translate from camera_metadata_t type to parm_type_t*/
9883 int rc = 0;
9884
9885 if (NULL == request->settings){
9886 LOGE("Reprocess settings cannot be NULL");
9887 return BAD_VALUE;
9888 }
9889
9890 if (NULL == reprocParam) {
9891 LOGE("Invalid reprocessing metadata buffer");
9892 return BAD_VALUE;
9893 }
9894 clear_metadata_buffer(reprocParam);
9895
9896 /*we need to update the frame number in the parameters*/
9897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
9898 request->frame_number)) {
9899 LOGE("Failed to set the frame number in the parameters");
9900 return BAD_VALUE;
9901 }
9902
9903 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
9904 if (rc < 0) {
9905 LOGE("Failed to translate reproc request");
9906 return rc;
9907 }
9908
9909 CameraMetadata frame_settings;
9910 frame_settings = request->settings;
9911 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
9912 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
9913 int32_t *crop_count =
9914 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
9915 int32_t *crop_data =
9916 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
9917 int32_t *roi_map =
9918 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
9919 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
9920 cam_crop_data_t crop_meta;
9921 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
9922 crop_meta.num_of_streams = 1;
9923 crop_meta.crop_info[0].crop.left = crop_data[0];
9924 crop_meta.crop_info[0].crop.top = crop_data[1];
9925 crop_meta.crop_info[0].crop.width = crop_data[2];
9926 crop_meta.crop_info[0].crop.height = crop_data[3];
9927
9928 crop_meta.crop_info[0].roi_map.left =
9929 roi_map[0];
9930 crop_meta.crop_info[0].roi_map.top =
9931 roi_map[1];
9932 crop_meta.crop_info[0].roi_map.width =
9933 roi_map[2];
9934 crop_meta.crop_info[0].roi_map.height =
9935 roi_map[3];
9936
9937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
9938 rc = BAD_VALUE;
9939 }
9940 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
9941 request->input_buffer->stream,
9942 crop_meta.crop_info[0].crop.left,
9943 crop_meta.crop_info[0].crop.top,
9944 crop_meta.crop_info[0].crop.width,
9945 crop_meta.crop_info[0].crop.height);
9946 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
9947 request->input_buffer->stream,
9948 crop_meta.crop_info[0].roi_map.left,
9949 crop_meta.crop_info[0].roi_map.top,
9950 crop_meta.crop_info[0].roi_map.width,
9951 crop_meta.crop_info[0].roi_map.height);
9952 } else {
9953 LOGE("Invalid reprocess crop count %d!", *crop_count);
9954 }
9955 } else {
9956 LOGE("No crop data from matching output stream");
9957 }
9958
9959 /* These settings are not needed for regular requests so handle them specially for
9960 reprocess requests; information needed for EXIF tags */
9961 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9962 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9963 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9964 if (NAME_NOT_FOUND != val) {
9965 uint32_t flashMode = (uint32_t)val;
9966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
9967 rc = BAD_VALUE;
9968 }
9969 } else {
9970 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
9971 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9972 }
9973 } else {
9974 LOGH("No flash mode in reprocess settings");
9975 }
9976
9977 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
9978 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
9979 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
9980 rc = BAD_VALUE;
9981 }
9982 } else {
9983 LOGH("No flash state in reprocess settings");
9984 }
9985
9986 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
9987 uint8_t *reprocessFlags =
9988 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
9989 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
9990 *reprocessFlags)) {
9991 rc = BAD_VALUE;
9992 }
9993 }
9994
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009995 // Add metadata which reprocess needs
9996 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
9997 cam_reprocess_info_t *repro_info =
9998 (cam_reprocess_info_t *)frame_settings.find
9999 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010000 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010001 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010002 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010003 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010004 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010005 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010006 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010007 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010009 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010010 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010011 repro_info->pipeline_flip);
10012 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10013 repro_info->af_roi);
10014 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10015 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010016 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10017 CAM_INTF_PARM_ROTATION metadata then has been added in
10018 translateToHalMetadata. HAL need to keep this new rotation
10019 metadata. Otherwise, the old rotation info saved in the vendor tag
10020 would be used */
10021 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10022 CAM_INTF_PARM_ROTATION, reprocParam) {
10023 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10024 } else {
10025 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010026 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010027 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010028 }
10029
10030 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10031 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10032 roi.width and roi.height would be the final JPEG size.
10033 For now, HAL only checks this for reprocess request */
10034 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10035 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10036 uint8_t *enable =
10037 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10038 if (*enable == TRUE) {
10039 int32_t *crop_data =
10040 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10041 cam_stream_crop_info_t crop_meta;
10042 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10043 crop_meta.stream_id = 0;
10044 crop_meta.crop.left = crop_data[0];
10045 crop_meta.crop.top = crop_data[1];
10046 crop_meta.crop.width = crop_data[2];
10047 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010048 // The JPEG crop roi should match cpp output size
10049 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10050 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10051 crop_meta.roi_map.left = 0;
10052 crop_meta.roi_map.top = 0;
10053 crop_meta.roi_map.width = cpp_crop->crop.width;
10054 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010055 }
10056 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10057 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010058 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010059 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010060 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10061 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010062 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010063 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10064
10065 // Add JPEG scale information
10066 cam_dimension_t scale_dim;
10067 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10068 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10069 int32_t *roi =
10070 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10071 scale_dim.width = roi[2];
10072 scale_dim.height = roi[3];
10073 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10074 scale_dim);
10075 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10076 scale_dim.width, scale_dim.height, mCameraId);
10077 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010078 }
10079 }
10080
10081 return rc;
10082}
10083
10084/*===========================================================================
10085 * FUNCTION : saveRequestSettings
10086 *
10087 * DESCRIPTION: Add any settings that might have changed to the request settings
10088 * and save the settings to be applied on the frame
10089 *
10090 * PARAMETERS :
10091 * @jpegMetadata : the extracted and/or modified jpeg metadata
10092 * @request : request with initial settings
10093 *
10094 * RETURN :
10095 * camera_metadata_t* : pointer to the saved request settings
10096 *==========================================================================*/
10097camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10098 const CameraMetadata &jpegMetadata,
10099 camera3_capture_request_t *request)
10100{
10101 camera_metadata_t *resultMetadata;
10102 CameraMetadata camMetadata;
10103 camMetadata = request->settings;
10104
10105 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10106 int32_t thumbnail_size[2];
10107 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10108 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10109 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10110 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10111 }
10112
10113 if (request->input_buffer != NULL) {
10114 uint8_t reprocessFlags = 1;
10115 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10116 (uint8_t*)&reprocessFlags,
10117 sizeof(reprocessFlags));
10118 }
10119
10120 resultMetadata = camMetadata.release();
10121 return resultMetadata;
10122}
10123
10124/*===========================================================================
10125 * FUNCTION : setHalFpsRange
10126 *
10127 * DESCRIPTION: set FPS range parameter
10128 *
10129 *
10130 * PARAMETERS :
10131 * @settings : Metadata from framework
10132 * @hal_metadata: Metadata buffer
10133 *
10134 *
10135 * RETURN : success: NO_ERROR
10136 * failure:
10137 *==========================================================================*/
10138int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10139 metadata_buffer_t *hal_metadata)
10140{
10141 int32_t rc = NO_ERROR;
10142 cam_fps_range_t fps_range;
10143 fps_range.min_fps = (float)
10144 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10145 fps_range.max_fps = (float)
10146 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10147 fps_range.video_min_fps = fps_range.min_fps;
10148 fps_range.video_max_fps = fps_range.max_fps;
10149
10150 LOGD("aeTargetFpsRange fps: [%f %f]",
10151 fps_range.min_fps, fps_range.max_fps);
10152 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10153 * follows:
10154 * ---------------------------------------------------------------|
10155 * Video stream is absent in configure_streams |
10156 * (Camcorder preview before the first video record |
10157 * ---------------------------------------------------------------|
10158 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10159 * | | | vid_min/max_fps|
10160 * ---------------------------------------------------------------|
10161 * NO | [ 30, 240] | 240 | [240, 240] |
10162 * |-------------|-------------|----------------|
10163 * | [240, 240] | 240 | [240, 240] |
10164 * ---------------------------------------------------------------|
10165 * Video stream is present in configure_streams |
10166 * ---------------------------------------------------------------|
10167 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10168 * | | | vid_min/max_fps|
10169 * ---------------------------------------------------------------|
10170 * NO | [ 30, 240] | 240 | [240, 240] |
10171 * (camcorder prev |-------------|-------------|----------------|
10172 * after video rec | [240, 240] | 240 | [240, 240] |
10173 * is stopped) | | | |
10174 * ---------------------------------------------------------------|
10175 * YES | [ 30, 240] | 240 | [240, 240] |
10176 * |-------------|-------------|----------------|
10177 * | [240, 240] | 240 | [240, 240] |
10178 * ---------------------------------------------------------------|
10179 * When Video stream is absent in configure_streams,
10180 * preview fps = sensor_fps / batchsize
10181 * Eg: for 240fps at batchSize 4, preview = 60fps
10182 * for 120fps at batchSize 4, preview = 30fps
10183 *
10184 * When video stream is present in configure_streams, preview fps is as per
10185 * the ratio of preview buffers to video buffers requested in process
10186 * capture request
10187 */
10188 mBatchSize = 0;
10189 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10190 fps_range.min_fps = fps_range.video_max_fps;
10191 fps_range.video_min_fps = fps_range.video_max_fps;
10192 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10193 fps_range.max_fps);
10194 if (NAME_NOT_FOUND != val) {
10195 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10196 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10197 return BAD_VALUE;
10198 }
10199
10200 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10201 /* If batchmode is currently in progress and the fps changes,
10202 * set the flag to restart the sensor */
10203 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10204 (mHFRVideoFps != fps_range.max_fps)) {
10205 mNeedSensorRestart = true;
10206 }
10207 mHFRVideoFps = fps_range.max_fps;
10208 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10209 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10210 mBatchSize = MAX_HFR_BATCH_SIZE;
10211 }
10212 }
10213 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10214
10215 }
10216 } else {
10217 /* HFR mode is session param in backend/ISP. This should be reset when
10218 * in non-HFR mode */
10219 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10220 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10221 return BAD_VALUE;
10222 }
10223 }
10224 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10225 return BAD_VALUE;
10226 }
10227 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10228 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10229 return rc;
10230}
10231
10232/*===========================================================================
10233 * FUNCTION : translateToHalMetadata
10234 *
10235 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10236 *
10237 *
10238 * PARAMETERS :
10239 * @request : request sent from framework
10240 *
10241 *
10242 * RETURN : success: NO_ERROR
10243 * failure:
10244 *==========================================================================*/
10245int QCamera3HardwareInterface::translateToHalMetadata
10246 (const camera3_capture_request_t *request,
10247 metadata_buffer_t *hal_metadata,
10248 uint32_t snapshotStreamId)
10249{
10250 int rc = 0;
10251 CameraMetadata frame_settings;
10252 frame_settings = request->settings;
10253
10254 /* Do not change the order of the following list unless you know what you are
10255 * doing.
10256 * The order is laid out in such a way that parameters in the front of the table
10257 * may be used to override the parameters later in the table. Examples are:
10258 * 1. META_MODE should precede AEC/AWB/AF MODE
10259 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10260 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10261 * 4. Any mode should precede it's corresponding settings
10262 */
10263 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10264 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10266 rc = BAD_VALUE;
10267 }
10268 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10269 if (rc != NO_ERROR) {
10270 LOGE("extractSceneMode failed");
10271 }
10272 }
10273
10274 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10275 uint8_t fwk_aeMode =
10276 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10277 uint8_t aeMode;
10278 int32_t redeye;
10279
10280 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10281 aeMode = CAM_AE_MODE_OFF;
10282 } else {
10283 aeMode = CAM_AE_MODE_ON;
10284 }
10285 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10286 redeye = 1;
10287 } else {
10288 redeye = 0;
10289 }
10290
10291 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10292 fwk_aeMode);
10293 if (NAME_NOT_FOUND != val) {
10294 int32_t flashMode = (int32_t)val;
10295 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10296 }
10297
10298 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10299 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10300 rc = BAD_VALUE;
10301 }
10302 }
10303
10304 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10305 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10306 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10307 fwk_whiteLevel);
10308 if (NAME_NOT_FOUND != val) {
10309 uint8_t whiteLevel = (uint8_t)val;
10310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10311 rc = BAD_VALUE;
10312 }
10313 }
10314 }
10315
10316 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10317 uint8_t fwk_cacMode =
10318 frame_settings.find(
10319 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10320 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10321 fwk_cacMode);
10322 if (NAME_NOT_FOUND != val) {
10323 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10324 bool entryAvailable = FALSE;
10325 // Check whether Frameworks set CAC mode is supported in device or not
10326 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10327 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10328 entryAvailable = TRUE;
10329 break;
10330 }
10331 }
10332 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10333 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10334 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10335 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10336 if (entryAvailable == FALSE) {
10337 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10338 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10339 } else {
10340 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10341 // High is not supported and so set the FAST as spec say's underlying
10342 // device implementation can be the same for both modes.
10343 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10344 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10345 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10346 // in order to avoid the fps drop due to high quality
10347 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10348 } else {
10349 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10350 }
10351 }
10352 }
10353 LOGD("Final cacMode is %d", cacMode);
10354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10355 rc = BAD_VALUE;
10356 }
10357 } else {
10358 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10359 }
10360 }
10361
10362 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10363 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10364 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10365 fwk_focusMode);
10366 if (NAME_NOT_FOUND != val) {
10367 uint8_t focusMode = (uint8_t)val;
10368 LOGD("set focus mode %d", focusMode);
10369 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10370 rc = BAD_VALUE;
10371 }
10372 }
10373 }
10374
10375 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10376 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10378 focalDistance)) {
10379 rc = BAD_VALUE;
10380 }
10381 }
10382
10383 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10384 uint8_t fwk_antibandingMode =
10385 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10386 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10387 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10388 if (NAME_NOT_FOUND != val) {
10389 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010390 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10391 if (m60HzZone) {
10392 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10393 } else {
10394 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10395 }
10396 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010397 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10398 hal_antibandingMode)) {
10399 rc = BAD_VALUE;
10400 }
10401 }
10402 }
10403
10404 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10405 int32_t expCompensation = frame_settings.find(
10406 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10407 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10408 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10409 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10410 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010411 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10413 expCompensation)) {
10414 rc = BAD_VALUE;
10415 }
10416 }
10417
10418 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10419 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10420 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10421 rc = BAD_VALUE;
10422 }
10423 }
10424 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10425 rc = setHalFpsRange(frame_settings, hal_metadata);
10426 if (rc != NO_ERROR) {
10427 LOGE("setHalFpsRange failed");
10428 }
10429 }
10430
10431 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10432 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10433 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10434 rc = BAD_VALUE;
10435 }
10436 }
10437
10438 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10439 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10440 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10441 fwk_effectMode);
10442 if (NAME_NOT_FOUND != val) {
10443 uint8_t effectMode = (uint8_t)val;
10444 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10445 rc = BAD_VALUE;
10446 }
10447 }
10448 }
10449
10450 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10451 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10453 colorCorrectMode)) {
10454 rc = BAD_VALUE;
10455 }
10456 }
10457
10458 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10459 cam_color_correct_gains_t colorCorrectGains;
10460 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10461 colorCorrectGains.gains[i] =
10462 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10463 }
10464 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10465 colorCorrectGains)) {
10466 rc = BAD_VALUE;
10467 }
10468 }
10469
10470 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10471 cam_color_correct_matrix_t colorCorrectTransform;
10472 cam_rational_type_t transform_elem;
10473 size_t num = 0;
10474 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10475 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10476 transform_elem.numerator =
10477 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10478 transform_elem.denominator =
10479 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10480 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10481 num++;
10482 }
10483 }
10484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10485 colorCorrectTransform)) {
10486 rc = BAD_VALUE;
10487 }
10488 }
10489
10490 cam_trigger_t aecTrigger;
10491 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10492 aecTrigger.trigger_id = -1;
10493 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10494 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10495 aecTrigger.trigger =
10496 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10497 aecTrigger.trigger_id =
10498 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10499 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10500 aecTrigger)) {
10501 rc = BAD_VALUE;
10502 }
10503 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10504 aecTrigger.trigger, aecTrigger.trigger_id);
10505 }
10506
10507 /*af_trigger must come with a trigger id*/
10508 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10509 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10510 cam_trigger_t af_trigger;
10511 af_trigger.trigger =
10512 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10513 af_trigger.trigger_id =
10514 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10515 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10516 rc = BAD_VALUE;
10517 }
10518 LOGD("AfTrigger: %d AfTriggerID: %d",
10519 af_trigger.trigger, af_trigger.trigger_id);
10520 }
10521
10522 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10523 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10525 rc = BAD_VALUE;
10526 }
10527 }
10528 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10529 cam_edge_application_t edge_application;
10530 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10531 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10532 edge_application.sharpness = 0;
10533 } else {
10534 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10535 }
10536 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10537 rc = BAD_VALUE;
10538 }
10539 }
10540
10541 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10542 int32_t respectFlashMode = 1;
10543 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10544 uint8_t fwk_aeMode =
10545 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10546 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10547 respectFlashMode = 0;
10548 LOGH("AE Mode controls flash, ignore android.flash.mode");
10549 }
10550 }
10551 if (respectFlashMode) {
10552 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10553 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10554 LOGH("flash mode after mapping %d", val);
10555 // To check: CAM_INTF_META_FLASH_MODE usage
10556 if (NAME_NOT_FOUND != val) {
10557 uint8_t flashMode = (uint8_t)val;
10558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10559 rc = BAD_VALUE;
10560 }
10561 }
10562 }
10563 }
10564
10565 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10566 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10568 rc = BAD_VALUE;
10569 }
10570 }
10571
10572 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10573 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10574 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10575 flashFiringTime)) {
10576 rc = BAD_VALUE;
10577 }
10578 }
10579
10580 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10581 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10583 hotPixelMode)) {
10584 rc = BAD_VALUE;
10585 }
10586 }
10587
10588 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10589 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10590 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10591 lensAperture)) {
10592 rc = BAD_VALUE;
10593 }
10594 }
10595
10596 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10597 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10598 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10599 filterDensity)) {
10600 rc = BAD_VALUE;
10601 }
10602 }
10603
10604 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10605 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10606 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10607 focalLength)) {
10608 rc = BAD_VALUE;
10609 }
10610 }
10611
10612 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10613 uint8_t optStabMode =
10614 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10615 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10616 optStabMode)) {
10617 rc = BAD_VALUE;
10618 }
10619 }
10620
10621 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10622 uint8_t videoStabMode =
10623 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10624 LOGD("videoStabMode from APP = %d", videoStabMode);
10625 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
10626 videoStabMode)) {
10627 rc = BAD_VALUE;
10628 }
10629 }
10630
10631
10632 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10633 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10634 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10635 noiseRedMode)) {
10636 rc = BAD_VALUE;
10637 }
10638 }
10639
10640 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10641 float reprocessEffectiveExposureFactor =
10642 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10644 reprocessEffectiveExposureFactor)) {
10645 rc = BAD_VALUE;
10646 }
10647 }
10648
10649 cam_crop_region_t scalerCropRegion;
10650 bool scalerCropSet = false;
10651 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10652 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10653 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10654 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10655 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10656
10657 // Map coordinate system from active array to sensor output.
10658 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10659 scalerCropRegion.width, scalerCropRegion.height);
10660
10661 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10662 scalerCropRegion)) {
10663 rc = BAD_VALUE;
10664 }
10665 scalerCropSet = true;
10666 }
10667
10668 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10669 int64_t sensorExpTime =
10670 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10671 LOGD("setting sensorExpTime %lld", sensorExpTime);
10672 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10673 sensorExpTime)) {
10674 rc = BAD_VALUE;
10675 }
10676 }
10677
10678 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10679 int64_t sensorFrameDuration =
10680 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10681 int64_t minFrameDuration = getMinFrameDuration(request);
10682 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10683 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10684 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10685 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10687 sensorFrameDuration)) {
10688 rc = BAD_VALUE;
10689 }
10690 }
10691
10692 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10693 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10694 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10695 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10696 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10697 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10698 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10699 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10700 sensorSensitivity)) {
10701 rc = BAD_VALUE;
10702 }
10703 }
10704
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010705#ifndef USE_HAL_3_3
10706 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10707 int32_t ispSensitivity =
10708 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10709 if (ispSensitivity <
10710 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10711 ispSensitivity =
10712 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10713 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10714 }
10715 if (ispSensitivity >
10716 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10717 ispSensitivity =
10718 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10719 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10720 }
10721 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10722 ispSensitivity)) {
10723 rc = BAD_VALUE;
10724 }
10725 }
10726#endif
10727
Thierry Strudel3d639192016-09-09 11:52:26 -070010728 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10729 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10730 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10731 rc = BAD_VALUE;
10732 }
10733 }
10734
10735 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10736 uint8_t fwk_facedetectMode =
10737 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10738
10739 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10740 fwk_facedetectMode);
10741
10742 if (NAME_NOT_FOUND != val) {
10743 uint8_t facedetectMode = (uint8_t)val;
10744 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10745 facedetectMode)) {
10746 rc = BAD_VALUE;
10747 }
10748 }
10749 }
10750
10751 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
10752 uint8_t histogramMode =
10753 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
10754 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
10755 histogramMode)) {
10756 rc = BAD_VALUE;
10757 }
10758 }
10759
10760 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
10761 uint8_t sharpnessMapMode =
10762 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
10763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
10764 sharpnessMapMode)) {
10765 rc = BAD_VALUE;
10766 }
10767 }
10768
10769 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
10770 uint8_t tonemapMode =
10771 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
10772 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
10773 rc = BAD_VALUE;
10774 }
10775 }
10776 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
10777 /*All tonemap channels will have the same number of points*/
10778 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
10779 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
10780 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
10781 cam_rgb_tonemap_curves tonemapCurves;
10782 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
10783 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
10784 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
10785 tonemapCurves.tonemap_points_cnt,
10786 CAM_MAX_TONEMAP_CURVE_SIZE);
10787 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
10788 }
10789
10790 /* ch0 = G*/
10791 size_t point = 0;
10792 cam_tonemap_curve_t tonemapCurveGreen;
10793 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10794 for (size_t j = 0; j < 2; j++) {
10795 tonemapCurveGreen.tonemap_points[i][j] =
10796 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
10797 point++;
10798 }
10799 }
10800 tonemapCurves.curves[0] = tonemapCurveGreen;
10801
10802 /* ch 1 = B */
10803 point = 0;
10804 cam_tonemap_curve_t tonemapCurveBlue;
10805 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10806 for (size_t j = 0; j < 2; j++) {
10807 tonemapCurveBlue.tonemap_points[i][j] =
10808 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
10809 point++;
10810 }
10811 }
10812 tonemapCurves.curves[1] = tonemapCurveBlue;
10813
10814 /* ch 2 = R */
10815 point = 0;
10816 cam_tonemap_curve_t tonemapCurveRed;
10817 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
10818 for (size_t j = 0; j < 2; j++) {
10819 tonemapCurveRed.tonemap_points[i][j] =
10820 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
10821 point++;
10822 }
10823 }
10824 tonemapCurves.curves[2] = tonemapCurveRed;
10825
10826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
10827 tonemapCurves)) {
10828 rc = BAD_VALUE;
10829 }
10830 }
10831
10832 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
10833 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
10834 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
10835 captureIntent)) {
10836 rc = BAD_VALUE;
10837 }
10838 }
10839
10840 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
10841 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
10842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
10843 blackLevelLock)) {
10844 rc = BAD_VALUE;
10845 }
10846 }
10847
10848 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
10849 uint8_t lensShadingMapMode =
10850 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
10851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
10852 lensShadingMapMode)) {
10853 rc = BAD_VALUE;
10854 }
10855 }
10856
10857 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
10858 cam_area_t roi;
10859 bool reset = true;
10860 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
10861
10862 // Map coordinate system from active array to sensor output.
10863 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10864 roi.rect.height);
10865
10866 if (scalerCropSet) {
10867 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10868 }
10869 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
10870 rc = BAD_VALUE;
10871 }
10872 }
10873
10874 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
10875 cam_area_t roi;
10876 bool reset = true;
10877 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
10878
10879 // Map coordinate system from active array to sensor output.
10880 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
10881 roi.rect.height);
10882
10883 if (scalerCropSet) {
10884 reset = resetIfNeededROI(&roi, &scalerCropRegion);
10885 }
10886 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
10887 rc = BAD_VALUE;
10888 }
10889 }
10890
10891 // CDS for non-HFR non-video mode
10892 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
10893 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
10894 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
10895 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
10896 LOGE("Invalid CDS mode %d!", *fwk_cds);
10897 } else {
10898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10899 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
10900 rc = BAD_VALUE;
10901 }
10902 }
10903 }
10904
Thierry Strudel04e026f2016-10-10 11:27:36 -070010905 // Video HDR
10906 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
10907 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
10908 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
10909 rc = setVideoHdrMode(mParameters, vhdr);
10910 if (rc != NO_ERROR) {
10911 LOGE("setVideoHDR is failed");
10912 }
10913 }
10914
10915 //IR
10916 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
10917 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
10918 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
10919 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
10920 LOGE("Invalid IR mode %d!", fwk_ir);
10921 } else {
10922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10923 CAM_INTF_META_IR_MODE, fwk_ir)) {
10924 rc = BAD_VALUE;
10925 }
10926 }
10927 }
10928
Thierry Strudel269c81a2016-10-12 12:13:59 -070010929 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
10930 float aec_speed;
10931 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
10932 LOGD("AEC Speed :%f", aec_speed);
10933 if ( aec_speed < 0 ) {
10934 LOGE("Invalid AEC mode %f!", aec_speed);
10935 } else {
10936 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
10937 aec_speed)) {
10938 rc = BAD_VALUE;
10939 }
10940 }
10941 }
10942
10943 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
10944 float awb_speed;
10945 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
10946 LOGD("AWB Speed :%f", awb_speed);
10947 if ( awb_speed < 0 ) {
10948 LOGE("Invalid AWB mode %f!", awb_speed);
10949 } else {
10950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
10951 awb_speed)) {
10952 rc = BAD_VALUE;
10953 }
10954 }
10955 }
10956
Thierry Strudel3d639192016-09-09 11:52:26 -070010957 // TNR
10958 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
10959 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
10960 uint8_t b_TnrRequested = 0;
10961 cam_denoise_param_t tnr;
10962 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
10963 tnr.process_plates =
10964 (cam_denoise_process_type_t)frame_settings.find(
10965 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
10966 b_TnrRequested = tnr.denoise_enable;
10967 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
10968 rc = BAD_VALUE;
10969 }
10970 }
10971
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010972 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
10973 int32_t* exposure_metering_mode =
10974 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
10975 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
10976 *exposure_metering_mode)) {
10977 rc = BAD_VALUE;
10978 }
10979 }
10980
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
10982 int32_t fwk_testPatternMode =
10983 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
10984 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
10985 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
10986
10987 if (NAME_NOT_FOUND != testPatternMode) {
10988 cam_test_pattern_data_t testPatternData;
10989 memset(&testPatternData, 0, sizeof(testPatternData));
10990 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
10991 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
10992 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
10993 int32_t *fwk_testPatternData =
10994 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
10995 testPatternData.r = fwk_testPatternData[0];
10996 testPatternData.b = fwk_testPatternData[3];
10997 switch (gCamCapability[mCameraId]->color_arrangement) {
10998 case CAM_FILTER_ARRANGEMENT_RGGB:
10999 case CAM_FILTER_ARRANGEMENT_GRBG:
11000 testPatternData.gr = fwk_testPatternData[1];
11001 testPatternData.gb = fwk_testPatternData[2];
11002 break;
11003 case CAM_FILTER_ARRANGEMENT_GBRG:
11004 case CAM_FILTER_ARRANGEMENT_BGGR:
11005 testPatternData.gr = fwk_testPatternData[2];
11006 testPatternData.gb = fwk_testPatternData[1];
11007 break;
11008 default:
11009 LOGE("color arrangement %d is not supported",
11010 gCamCapability[mCameraId]->color_arrangement);
11011 break;
11012 }
11013 }
11014 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11015 testPatternData)) {
11016 rc = BAD_VALUE;
11017 }
11018 } else {
11019 LOGE("Invalid framework sensor test pattern mode %d",
11020 fwk_testPatternMode);
11021 }
11022 }
11023
11024 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11025 size_t count = 0;
11026 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11027 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11028 gps_coords.data.d, gps_coords.count, count);
11029 if (gps_coords.count != count) {
11030 rc = BAD_VALUE;
11031 }
11032 }
11033
11034 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11035 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11036 size_t count = 0;
11037 const char *gps_methods_src = (const char *)
11038 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11039 memset(gps_methods, '\0', sizeof(gps_methods));
11040 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11041 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11042 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11043 if (GPS_PROCESSING_METHOD_SIZE != count) {
11044 rc = BAD_VALUE;
11045 }
11046 }
11047
11048 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11049 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11050 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11051 gps_timestamp)) {
11052 rc = BAD_VALUE;
11053 }
11054 }
11055
11056 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11057 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11058 cam_rotation_info_t rotation_info;
11059 if (orientation == 0) {
11060 rotation_info.rotation = ROTATE_0;
11061 } else if (orientation == 90) {
11062 rotation_info.rotation = ROTATE_90;
11063 } else if (orientation == 180) {
11064 rotation_info.rotation = ROTATE_180;
11065 } else if (orientation == 270) {
11066 rotation_info.rotation = ROTATE_270;
11067 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011068 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011069 rotation_info.streamId = snapshotStreamId;
11070 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11071 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11072 rc = BAD_VALUE;
11073 }
11074 }
11075
11076 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11077 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11079 rc = BAD_VALUE;
11080 }
11081 }
11082
11083 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11084 uint32_t thumb_quality = (uint32_t)
11085 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11086 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11087 thumb_quality)) {
11088 rc = BAD_VALUE;
11089 }
11090 }
11091
11092 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11093 cam_dimension_t dim;
11094 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11095 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11097 rc = BAD_VALUE;
11098 }
11099 }
11100
11101 // Internal metadata
11102 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11103 size_t count = 0;
11104 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11105 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11106 privatedata.data.i32, privatedata.count, count);
11107 if (privatedata.count != count) {
11108 rc = BAD_VALUE;
11109 }
11110 }
11111
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011112 // ISO/Exposure Priority
11113 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11114 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11115 cam_priority_mode_t mode =
11116 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11117 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11118 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11119 use_iso_exp_pty.previewOnly = FALSE;
11120 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11121 use_iso_exp_pty.value = *ptr;
11122
11123 if(CAM_ISO_PRIORITY == mode) {
11124 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11125 use_iso_exp_pty)) {
11126 rc = BAD_VALUE;
11127 }
11128 }
11129 else {
11130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11131 use_iso_exp_pty)) {
11132 rc = BAD_VALUE;
11133 }
11134 }
11135 }
11136 }
11137
11138 // Saturation
11139 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11140 int32_t* use_saturation =
11141 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11143 rc = BAD_VALUE;
11144 }
11145 }
11146
Thierry Strudel3d639192016-09-09 11:52:26 -070011147 // EV step
11148 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11149 gCamCapability[mCameraId]->exp_compensation_step)) {
11150 rc = BAD_VALUE;
11151 }
11152
11153 // CDS info
11154 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11155 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11156 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11157
11158 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11159 CAM_INTF_META_CDS_DATA, *cdsData)) {
11160 rc = BAD_VALUE;
11161 }
11162 }
11163
Shuzhen Wang19463d72016-03-08 11:09:52 -080011164 // Hybrid AE
11165 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11166 uint8_t *hybrid_ae = (uint8_t *)
11167 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11168
11169 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11170 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11171 rc = BAD_VALUE;
11172 }
11173 }
11174
Thierry Strudel3d639192016-09-09 11:52:26 -070011175 return rc;
11176}
11177
11178/*===========================================================================
11179 * FUNCTION : captureResultCb
11180 *
11181 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11182 *
11183 * PARAMETERS :
11184 * @frame : frame information from mm-camera-interface
11185 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11186 * @userdata: userdata
11187 *
11188 * RETURN : NONE
11189 *==========================================================================*/
11190void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11191 camera3_stream_buffer_t *buffer,
11192 uint32_t frame_number, bool isInputBuffer, void *userdata)
11193{
11194 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11195 if (hw == NULL) {
11196 LOGE("Invalid hw %p", hw);
11197 return;
11198 }
11199
11200 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11201 return;
11202}
11203
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011204/*===========================================================================
11205 * FUNCTION : setBufferErrorStatus
11206 *
11207 * DESCRIPTION: Callback handler for channels to report any buffer errors
11208 *
11209 * PARAMETERS :
11210 * @ch : Channel on which buffer error is reported from
11211 * @frame_number : frame number on which buffer error is reported on
11212 * @buffer_status : buffer error status
11213 * @userdata: userdata
11214 *
11215 * RETURN : NONE
11216 *==========================================================================*/
11217void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11218 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11219{
11220 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11221 if (hw == NULL) {
11222 LOGE("Invalid hw %p", hw);
11223 return;
11224 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011225
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011226 hw->setBufferErrorStatus(ch, frame_number, err);
11227 return;
11228}
11229
11230void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11231 uint32_t frameNumber, camera3_buffer_status_t err)
11232{
11233 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11234 pthread_mutex_lock(&mMutex);
11235
11236 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11237 if (req.frame_number != frameNumber)
11238 continue;
11239 for (auto& k : req.mPendingBufferList) {
11240 if(k.stream->priv == ch) {
11241 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11242 }
11243 }
11244 }
11245
11246 pthread_mutex_unlock(&mMutex);
11247 return;
11248}
Thierry Strudel3d639192016-09-09 11:52:26 -070011249/*===========================================================================
11250 * FUNCTION : initialize
11251 *
11252 * DESCRIPTION: Pass framework callback pointers to HAL
11253 *
11254 * PARAMETERS :
11255 *
11256 *
11257 * RETURN : Success : 0
11258 * Failure: -ENODEV
11259 *==========================================================================*/
11260
11261int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11262 const camera3_callback_ops_t *callback_ops)
11263{
11264 LOGD("E");
11265 QCamera3HardwareInterface *hw =
11266 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11267 if (!hw) {
11268 LOGE("NULL camera device");
11269 return -ENODEV;
11270 }
11271
11272 int rc = hw->initialize(callback_ops);
11273 LOGD("X");
11274 return rc;
11275}
11276
11277/*===========================================================================
11278 * FUNCTION : configure_streams
11279 *
11280 * DESCRIPTION:
11281 *
11282 * PARAMETERS :
11283 *
11284 *
11285 * RETURN : Success: 0
11286 * Failure: -EINVAL (if stream configuration is invalid)
11287 * -ENODEV (fatal error)
11288 *==========================================================================*/
11289
11290int QCamera3HardwareInterface::configure_streams(
11291 const struct camera3_device *device,
11292 camera3_stream_configuration_t *stream_list)
11293{
11294 LOGD("E");
11295 QCamera3HardwareInterface *hw =
11296 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11297 if (!hw) {
11298 LOGE("NULL camera device");
11299 return -ENODEV;
11300 }
11301 int rc = hw->configureStreams(stream_list);
11302 LOGD("X");
11303 return rc;
11304}
11305
11306/*===========================================================================
11307 * FUNCTION : construct_default_request_settings
11308 *
11309 * DESCRIPTION: Configure a settings buffer to meet the required use case
11310 *
11311 * PARAMETERS :
11312 *
11313 *
11314 * RETURN : Success: Return valid metadata
11315 * Failure: Return NULL
11316 *==========================================================================*/
11317const camera_metadata_t* QCamera3HardwareInterface::
11318 construct_default_request_settings(const struct camera3_device *device,
11319 int type)
11320{
11321
11322 LOGD("E");
11323 camera_metadata_t* fwk_metadata = NULL;
11324 QCamera3HardwareInterface *hw =
11325 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11326 if (!hw) {
11327 LOGE("NULL camera device");
11328 return NULL;
11329 }
11330
11331 fwk_metadata = hw->translateCapabilityToMetadata(type);
11332
11333 LOGD("X");
11334 return fwk_metadata;
11335}
11336
11337/*===========================================================================
11338 * FUNCTION : process_capture_request
11339 *
11340 * DESCRIPTION:
11341 *
11342 * PARAMETERS :
11343 *
11344 *
11345 * RETURN :
11346 *==========================================================================*/
11347int QCamera3HardwareInterface::process_capture_request(
11348 const struct camera3_device *device,
11349 camera3_capture_request_t *request)
11350{
11351 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011352 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011353 QCamera3HardwareInterface *hw =
11354 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11355 if (!hw) {
11356 LOGE("NULL camera device");
11357 return -EINVAL;
11358 }
11359
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011360 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011361 LOGD("X");
11362 return rc;
11363}
11364
11365/*===========================================================================
11366 * FUNCTION : dump
11367 *
11368 * DESCRIPTION:
11369 *
11370 * PARAMETERS :
11371 *
11372 *
11373 * RETURN :
11374 *==========================================================================*/
11375
11376void QCamera3HardwareInterface::dump(
11377 const struct camera3_device *device, int fd)
11378{
11379 /* Log level property is read when "adb shell dumpsys media.camera" is
11380 called so that the log level can be controlled without restarting
11381 the media server */
11382 getLogLevel();
11383
11384 LOGD("E");
11385 QCamera3HardwareInterface *hw =
11386 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11387 if (!hw) {
11388 LOGE("NULL camera device");
11389 return;
11390 }
11391
11392 hw->dump(fd);
11393 LOGD("X");
11394 return;
11395}
11396
11397/*===========================================================================
11398 * FUNCTION : flush
11399 *
11400 * DESCRIPTION:
11401 *
11402 * PARAMETERS :
11403 *
11404 *
11405 * RETURN :
11406 *==========================================================================*/
11407
11408int QCamera3HardwareInterface::flush(
11409 const struct camera3_device *device)
11410{
11411 int rc;
11412 LOGD("E");
11413 QCamera3HardwareInterface *hw =
11414 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11415 if (!hw) {
11416 LOGE("NULL camera device");
11417 return -EINVAL;
11418 }
11419
11420 pthread_mutex_lock(&hw->mMutex);
11421 // Validate current state
11422 switch (hw->mState) {
11423 case STARTED:
11424 /* valid state */
11425 break;
11426
11427 case ERROR:
11428 pthread_mutex_unlock(&hw->mMutex);
11429 hw->handleCameraDeviceError();
11430 return -ENODEV;
11431
11432 default:
11433 LOGI("Flush returned during state %d", hw->mState);
11434 pthread_mutex_unlock(&hw->mMutex);
11435 return 0;
11436 }
11437 pthread_mutex_unlock(&hw->mMutex);
11438
11439 rc = hw->flush(true /* restart channels */ );
11440 LOGD("X");
11441 return rc;
11442}
11443
11444/*===========================================================================
11445 * FUNCTION : close_camera_device
11446 *
11447 * DESCRIPTION:
11448 *
11449 * PARAMETERS :
11450 *
11451 *
11452 * RETURN :
11453 *==========================================================================*/
11454int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11455{
11456 int ret = NO_ERROR;
11457 QCamera3HardwareInterface *hw =
11458 reinterpret_cast<QCamera3HardwareInterface *>(
11459 reinterpret_cast<camera3_device_t *>(device)->priv);
11460 if (!hw) {
11461 LOGE("NULL camera device");
11462 return BAD_VALUE;
11463 }
11464
11465 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11466 delete hw;
11467 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011468 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011469 return ret;
11470}
11471
11472/*===========================================================================
11473 * FUNCTION : getWaveletDenoiseProcessPlate
11474 *
11475 * DESCRIPTION: query wavelet denoise process plate
11476 *
11477 * PARAMETERS : None
11478 *
11479 * RETURN : WNR prcocess plate value
11480 *==========================================================================*/
11481cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11482{
11483 char prop[PROPERTY_VALUE_MAX];
11484 memset(prop, 0, sizeof(prop));
11485 property_get("persist.denoise.process.plates", prop, "0");
11486 int processPlate = atoi(prop);
11487 switch(processPlate) {
11488 case 0:
11489 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11490 case 1:
11491 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11492 case 2:
11493 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11494 case 3:
11495 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11496 default:
11497 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11498 }
11499}
11500
11501
11502/*===========================================================================
11503 * FUNCTION : getTemporalDenoiseProcessPlate
11504 *
11505 * DESCRIPTION: query temporal denoise process plate
11506 *
11507 * PARAMETERS : None
11508 *
11509 * RETURN : TNR prcocess plate value
11510 *==========================================================================*/
11511cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11512{
11513 char prop[PROPERTY_VALUE_MAX];
11514 memset(prop, 0, sizeof(prop));
11515 property_get("persist.tnr.process.plates", prop, "0");
11516 int processPlate = atoi(prop);
11517 switch(processPlate) {
11518 case 0:
11519 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11520 case 1:
11521 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11522 case 2:
11523 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11524 case 3:
11525 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11526 default:
11527 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11528 }
11529}
11530
11531
11532/*===========================================================================
11533 * FUNCTION : extractSceneMode
11534 *
11535 * DESCRIPTION: Extract scene mode from frameworks set metadata
11536 *
11537 * PARAMETERS :
11538 * @frame_settings: CameraMetadata reference
11539 * @metaMode: ANDROID_CONTORL_MODE
11540 * @hal_metadata: hal metadata structure
11541 *
11542 * RETURN : None
11543 *==========================================================================*/
11544int32_t QCamera3HardwareInterface::extractSceneMode(
11545 const CameraMetadata &frame_settings, uint8_t metaMode,
11546 metadata_buffer_t *hal_metadata)
11547{
11548 int32_t rc = NO_ERROR;
11549
11550 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11551 camera_metadata_ro_entry entry =
11552 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11553 if (0 == entry.count)
11554 return rc;
11555
11556 uint8_t fwk_sceneMode = entry.data.u8[0];
11557
11558 int val = lookupHalName(SCENE_MODES_MAP,
11559 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11560 fwk_sceneMode);
11561 if (NAME_NOT_FOUND != val) {
11562 uint8_t sceneMode = (uint8_t)val;
11563 LOGD("sceneMode: %d", sceneMode);
11564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11565 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11566 rc = BAD_VALUE;
11567 }
11568 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011569
11570 if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
11571 cam_hdr_param_t hdr_params;
11572 hdr_params.hdr_enable = 1;
11573 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11574 hdr_params.hdr_need_1x = false;
11575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11576 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11577 rc = BAD_VALUE;
11578 }
11579 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011580 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
11581 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
11582 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11583 LOGD("sceneMode: %d", sceneMode);
11584 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11585 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11586 rc = BAD_VALUE;
11587 }
11588 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011589
11590 if (mForceHdrSnapshot) {
11591 cam_hdr_param_t hdr_params;
11592 hdr_params.hdr_enable = 1;
11593 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11594 hdr_params.hdr_need_1x = false;
11595 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11596 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11597 rc = BAD_VALUE;
11598 }
11599 }
11600
Thierry Strudel3d639192016-09-09 11:52:26 -070011601 return rc;
11602}
11603
11604/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011605 * FUNCTION : setVideoHdrMode
11606 *
11607 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11608 *
11609 * PARAMETERS :
11610 * @hal_metadata: hal metadata structure
11611 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11612 *
11613 * RETURN : None
11614 *==========================================================================*/
11615int32_t QCamera3HardwareInterface::setVideoHdrMode(
11616 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
11617{
11618 int32_t rc = NO_ERROR;
11619 if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
11620 LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
11621 rc = BAD_VALUE;
11622 } else {
11623 cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
11624 if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
11625 LOGD("Setting HDR mode Off");
11626 vhdr_type = CAM_SENSOR_HDR_OFF;
11627 } else {
11628 char video_hdr_prop[PROPERTY_VALUE_MAX];
11629 memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
11630 property_get("persist.camera.hdr.video", video_hdr_prop, "3");
11631 uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
11632 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11633 CAM_QCOM_FEATURE_SENSOR_HDR) &&
11634 (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
11635 LOGD("Setting HDR mode In Sensor");
11636 vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
11637 }
11638 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11639 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
11640 (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
11641 LOGD("Setting HDR mode Zigzag");
11642 vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
11643 }
11644 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11645 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
11646 (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
11647 LOGD("Setting HDR mode Staggered");
11648 vhdr_type = CAM_SENSOR_HDR_STAGGERED;
11649 }
11650 if(vhdr_type == CAM_SENSOR_HDR_MAX) {
11651 LOGD("HDR mode not supported");
11652 rc = BAD_VALUE;
11653 }
11654 }
11655 if(rc == NO_ERROR) {
11656 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11657 CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
11658 rc = BAD_VALUE;
11659 }
11660 }
11661 }
11662 return rc;
11663}
11664
11665/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011666 * FUNCTION : needRotationReprocess
11667 *
11668 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11669 *
11670 * PARAMETERS : none
11671 *
11672 * RETURN : true: needed
11673 * false: no need
11674 *==========================================================================*/
11675bool QCamera3HardwareInterface::needRotationReprocess()
11676{
11677 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11678 // current rotation is not zero, and pp has the capability to process rotation
11679 LOGH("need do reprocess for rotation");
11680 return true;
11681 }
11682
11683 return false;
11684}
11685
11686/*===========================================================================
11687 * FUNCTION : needReprocess
11688 *
11689 * DESCRIPTION: if reprocess in needed
11690 *
11691 * PARAMETERS : none
11692 *
11693 * RETURN : true: needed
11694 * false: no need
11695 *==========================================================================*/
11696bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11697{
11698 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11699 // TODO: add for ZSL HDR later
11700 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11701 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11702 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11703 return true;
11704 } else {
11705 LOGH("already post processed frame");
11706 return false;
11707 }
11708 }
11709 return needRotationReprocess();
11710}
11711
11712/*===========================================================================
11713 * FUNCTION : needJpegExifRotation
11714 *
11715 * DESCRIPTION: if rotation from jpeg is needed
11716 *
11717 * PARAMETERS : none
11718 *
11719 * RETURN : true: needed
11720 * false: no need
11721 *==========================================================================*/
11722bool QCamera3HardwareInterface::needJpegExifRotation()
11723{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011724 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011725 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11726 LOGD("Need use Jpeg EXIF Rotation");
11727 return true;
11728 }
11729 return false;
11730}
11731
/*===========================================================================
 * FUNCTION   : addOfflineReprocChannel
 *
 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
 *              coming from input channel
 *
 * PARAMETERS :
 *   @config        : reprocess configuration (padding, HDR params, ...)
 *   @inputChHandle : pointer to the input (source) channel
 *
 * RETURN     : Ptr to the newly created channel obj. NULL if failed.
 *              On any failure after allocation the channel is deleted
 *              before returning, so no leak is possible here.
 *==========================================================================*/
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
            config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config: start from the HAL3 superset, then adjust
    // according to hardware capability and the requested config.
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation when the PP hardware cannot rotate; EXIF-based JPEG
    // rotation is used instead (see needJpegExifRotation()).
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    if (config.hdr_param.hdr_enable) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param = config.hdr_param;
    }

    // Forced HDR snapshot overrides any HDR params from config.
    if (mForceHdrSnapshot) {
        pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
        pp_config.hdr_param.hdr_enable = 1;
        pp_config.hdr_param.hdr_need_1x = 0;
        pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
11804
/*===========================================================================
 * FUNCTION   : getMobicatMask
 *
 * DESCRIPTION: returns mobicat mask
 *
 * PARAMETERS : none
 *
 * RETURN     : mobicat mask
 *
 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // m_MobicatMask is set by setMobicat() from the
    // persist.camera.mobicat property.
    return m_MobicatMask;
}
11819
11820/*===========================================================================
11821 * FUNCTION : setMobicat
11822 *
11823 * DESCRIPTION: set Mobicat on/off.
11824 *
11825 * PARAMETERS :
11826 * @params : none
11827 *
11828 * RETURN : int32_t type of status
11829 * NO_ERROR -- success
11830 * none-zero failure code
11831 *==========================================================================*/
11832int32_t QCamera3HardwareInterface::setMobicat()
11833{
11834 char value [PROPERTY_VALUE_MAX];
11835 property_get("persist.camera.mobicat", value, "0");
11836 int32_t ret = NO_ERROR;
11837 uint8_t enableMobi = (uint8_t)atoi(value);
11838
11839 if (enableMobi) {
11840 tune_cmd_t tune_cmd;
11841 tune_cmd.type = SET_RELOAD_CHROMATIX;
11842 tune_cmd.module = MODULE_ALL;
11843 tune_cmd.value = TRUE;
11844 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11845 CAM_INTF_PARM_SET_VFE_COMMAND,
11846 tune_cmd);
11847
11848 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
11849 CAM_INTF_PARM_SET_PP_COMMAND,
11850 tune_cmd);
11851 }
11852 m_MobicatMask = enableMobi;
11853
11854 return ret;
11855}
11856
11857/*===========================================================================
11858* FUNCTION : getLogLevel
11859*
11860* DESCRIPTION: Reads the log level property into a variable
11861*
11862* PARAMETERS :
11863* None
11864*
11865* RETURN :
11866* None
11867*==========================================================================*/
11868void QCamera3HardwareInterface::getLogLevel()
11869{
11870 char prop[PROPERTY_VALUE_MAX];
11871 uint32_t globalLogLevel = 0;
11872
11873 property_get("persist.camera.hal.debug", prop, "0");
11874 int val = atoi(prop);
11875 if (0 <= val) {
11876 gCamHal3LogLevel = (uint32_t)val;
11877 }
11878
Thierry Strudel9ec39c62016-12-28 11:30:05 -080011879 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070011880 gKpiDebugLevel = atoi(prop);
11881
11882 property_get("persist.camera.global.debug", prop, "0");
11883 val = atoi(prop);
11884 if (0 <= val) {
11885 globalLogLevel = (uint32_t)val;
11886 }
11887
11888 /* Highest log level among hal.logs and global.logs is selected */
11889 if (gCamHal3LogLevel < globalLogLevel)
11890 gCamHal3LogLevel = globalLogLevel;
11891
11892 return;
11893}
11894
11895/*===========================================================================
11896 * FUNCTION : validateStreamRotations
11897 *
11898 * DESCRIPTION: Check if the rotations requested are supported
11899 *
11900 * PARAMETERS :
11901 * @stream_list : streams to be configured
11902 *
11903 * RETURN : NO_ERROR on success
11904 * -EINVAL on failure
11905 *
11906 *==========================================================================*/
11907int QCamera3HardwareInterface::validateStreamRotations(
11908 camera3_stream_configuration_t *streamList)
11909{
11910 int rc = NO_ERROR;
11911
11912 /*
11913 * Loop through all streams requested in configuration
11914 * Check if unsupported rotations have been requested on any of them
11915 */
11916 for (size_t j = 0; j < streamList->num_streams; j++){
11917 camera3_stream_t *newStream = streamList->streams[j];
11918
11919 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
11920 bool isImplDef = (newStream->format ==
11921 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
11922 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
11923 isImplDef);
11924
11925 if (isRotated && (!isImplDef || isZsl)) {
11926 LOGE("Error: Unsupported rotation of %d requested for stream"
11927 "type:%d and stream format:%d",
11928 newStream->rotation, newStream->stream_type,
11929 newStream->format);
11930 rc = -EINVAL;
11931 break;
11932 }
11933 }
11934
11935 return rc;
11936}
11937
11938/*===========================================================================
11939* FUNCTION : getFlashInfo
11940*
11941* DESCRIPTION: Retrieve information about whether the device has a flash.
11942*
11943* PARAMETERS :
11944* @cameraId : Camera id to query
11945* @hasFlash : Boolean indicating whether there is a flash device
11946* associated with given camera
11947* @flashNode : If a flash device exists, this will be its device node.
11948*
11949* RETURN :
11950* None
11951*==========================================================================*/
11952void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
11953 bool& hasFlash,
11954 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
11955{
11956 cam_capability_t* camCapability = gCamCapability[cameraId];
11957 if (NULL == camCapability) {
11958 hasFlash = false;
11959 flashNode[0] = '\0';
11960 } else {
11961 hasFlash = camCapability->flash_available;
11962 strlcpy(flashNode,
11963 (char*)camCapability->flash_dev_name,
11964 QCAMERA_MAX_FILEPATH_LENGTH);
11965 }
11966}
11967
/*===========================================================================
* FUNCTION   : getEepromVersionInfo
*
* DESCRIPTION: Retrieve version info of the sensor EEPROM data
*
* PARAMETERS : None
*
* RETURN     : string describing EEPROM version
*              "\0" if no such info available
*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Points into the per-camera capability table; caller must not free.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
11982
11983/*===========================================================================
11984* FUNCTION : getLdafCalib
11985*
11986* DESCRIPTION: Retrieve Laser AF calibration data
11987*
11988* PARAMETERS : None
11989*
11990* RETURN : Two uint32_t describing laser AF calibration data
11991* NULL if none is available.
11992*==========================================================================*/
11993const uint32_t *QCamera3HardwareInterface::getLdafCalib()
11994{
11995 if (mLdafCalibExist) {
11996 return &mLdafCalib[0];
11997 } else {
11998 return NULL;
11999 }
12000}
12001
12002/*===========================================================================
12003 * FUNCTION : dynamicUpdateMetaStreamInfo
12004 *
12005 * DESCRIPTION: This function:
12006 * (1) stops all the channels
12007 * (2) returns error on pending requests and buffers
12008 * (3) sends metastream_info in setparams
12009 * (4) starts all channels
12010 * This is useful when sensor has to be restarted to apply any
12011 * settings such as frame rate from a different sensor mode
12012 *
12013 * PARAMETERS : None
12014 *
12015 * RETURN : NO_ERROR on success
12016 * Error codes on failure
12017 *
12018 *==========================================================================*/
12019int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12020{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012021 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012022 int rc = NO_ERROR;
12023
12024 LOGD("E");
12025
12026 rc = stopAllChannels();
12027 if (rc < 0) {
12028 LOGE("stopAllChannels failed");
12029 return rc;
12030 }
12031
12032 rc = notifyErrorForPendingRequests();
12033 if (rc < 0) {
12034 LOGE("notifyErrorForPendingRequests failed");
12035 return rc;
12036 }
12037
12038 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12039 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12040 "Format:%d",
12041 mStreamConfigInfo.type[i],
12042 mStreamConfigInfo.stream_sizes[i].width,
12043 mStreamConfigInfo.stream_sizes[i].height,
12044 mStreamConfigInfo.postprocess_mask[i],
12045 mStreamConfigInfo.format[i]);
12046 }
12047
12048 /* Send meta stream info once again so that ISP can start */
12049 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12050 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12051 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12052 mParameters);
12053 if (rc < 0) {
12054 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12055 }
12056
12057 rc = startAllChannels();
12058 if (rc < 0) {
12059 LOGE("startAllChannels failed");
12060 return rc;
12061 }
12062
12063 LOGD("X");
12064 return rc;
12065}
12066
12067/*===========================================================================
12068 * FUNCTION : stopAllChannels
12069 *
12070 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12071 *
12072 * PARAMETERS : None
12073 *
12074 * RETURN : NO_ERROR on success
12075 * Error codes on failure
12076 *
12077 *==========================================================================*/
12078int32_t QCamera3HardwareInterface::stopAllChannels()
12079{
12080 int32_t rc = NO_ERROR;
12081
12082 LOGD("Stopping all channels");
12083 // Stop the Streams/Channels
12084 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12085 it != mStreamInfo.end(); it++) {
12086 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12087 if (channel) {
12088 channel->stop();
12089 }
12090 (*it)->status = INVALID;
12091 }
12092
12093 if (mSupportChannel) {
12094 mSupportChannel->stop();
12095 }
12096 if (mAnalysisChannel) {
12097 mAnalysisChannel->stop();
12098 }
12099 if (mRawDumpChannel) {
12100 mRawDumpChannel->stop();
12101 }
12102 if (mMetadataChannel) {
12103 /* If content of mStreamInfo is not 0, there is metadata stream */
12104 mMetadataChannel->stop();
12105 }
12106
12107 LOGD("All channels stopped");
12108 return rc;
12109}
12110
12111/*===========================================================================
12112 * FUNCTION : startAllChannels
12113 *
12114 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12115 *
12116 * PARAMETERS : None
12117 *
12118 * RETURN : NO_ERROR on success
12119 * Error codes on failure
12120 *
12121 *==========================================================================*/
12122int32_t QCamera3HardwareInterface::startAllChannels()
12123{
12124 int32_t rc = NO_ERROR;
12125
12126 LOGD("Start all channels ");
12127 // Start the Streams/Channels
12128 if (mMetadataChannel) {
12129 /* If content of mStreamInfo is not 0, there is metadata stream */
12130 rc = mMetadataChannel->start();
12131 if (rc < 0) {
12132 LOGE("META channel start failed");
12133 return rc;
12134 }
12135 }
12136 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12137 it != mStreamInfo.end(); it++) {
12138 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12139 if (channel) {
12140 rc = channel->start();
12141 if (rc < 0) {
12142 LOGE("channel start failed");
12143 return rc;
12144 }
12145 }
12146 }
12147 if (mAnalysisChannel) {
12148 mAnalysisChannel->start();
12149 }
12150 if (mSupportChannel) {
12151 rc = mSupportChannel->start();
12152 if (rc < 0) {
12153 LOGE("Support channel start failed");
12154 return rc;
12155 }
12156 }
12157 if (mRawDumpChannel) {
12158 rc = mRawDumpChannel->start();
12159 if (rc < 0) {
12160 LOGE("RAW dump channel start failed");
12161 return rc;
12162 }
12163 }
12164
12165 LOGD("All channels started");
12166 return rc;
12167}
12168
/*===========================================================================
 * FUNCTION   : notifyErrorForPendingRequests
 *
 * DESCRIPTION: This function sends error for all the pending requests/buffers.
 *              Buffers older than the oldest pending request get ERROR_BUFFER
 *              notifications (their metadata was already sent); requests at or
 *              after it get a single ERROR_REQUEST notification. All tracking
 *              lists are cleared before returning.
 *
 * PARAMETERS : None
 *
 * RETURN     : Error codes
 *              NO_ERROR on success
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
          frameNum);

    // Walk every request that still owns buffers; each iteration erases the
    // entry it handled, so the loop header deliberately does not advance req.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // One ERROR_BUFFER notify per buffer, then mark the buffer
                // itself as in error state in the result array.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
            // NOTE(review): assumes the head of mPendingRequestsList matches
            // req->frame_number (both lists drained oldest-first) — verify.

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
12317
12318bool QCamera3HardwareInterface::isOnEncoder(
12319 const cam_dimension_t max_viewfinder_size,
12320 uint32_t width, uint32_t height)
12321{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012322 return ((width > (uint32_t)max_viewfinder_size.width) ||
12323 (height > (uint32_t)max_viewfinder_size.height) ||
12324 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12325 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012326}
12327
12328/*===========================================================================
12329 * FUNCTION : setBundleInfo
12330 *
12331 * DESCRIPTION: Set bundle info for all streams that are bundle.
12332 *
12333 * PARAMETERS : None
12334 *
12335 * RETURN : NO_ERROR on success
12336 * Error codes on failure
12337 *==========================================================================*/
12338int32_t QCamera3HardwareInterface::setBundleInfo()
12339{
12340 int32_t rc = NO_ERROR;
12341
12342 if (mChannelHandle) {
12343 cam_bundle_config_t bundleInfo;
12344 memset(&bundleInfo, 0, sizeof(bundleInfo));
12345 rc = mCameraHandle->ops->get_bundle_info(
12346 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12347 if (rc != NO_ERROR) {
12348 LOGE("get_bundle_info failed");
12349 return rc;
12350 }
12351 if (mAnalysisChannel) {
12352 mAnalysisChannel->setBundleInfo(bundleInfo);
12353 }
12354 if (mSupportChannel) {
12355 mSupportChannel->setBundleInfo(bundleInfo);
12356 }
12357 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12358 it != mStreamInfo.end(); it++) {
12359 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12360 channel->setBundleInfo(bundleInfo);
12361 }
12362 if (mRawDumpChannel) {
12363 mRawDumpChannel->setBundleInfo(bundleInfo);
12364 }
12365 }
12366
12367 return rc;
12368}
12369
12370/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012371 * FUNCTION : setInstantAEC
12372 *
12373 * DESCRIPTION: Set Instant AEC related params.
12374 *
12375 * PARAMETERS :
12376 * @meta: CameraMetadata reference
12377 *
12378 * RETURN : NO_ERROR on success
12379 * Error codes on failure
12380 *==========================================================================*/
12381int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12382{
12383 int32_t rc = NO_ERROR;
12384 uint8_t val = 0;
12385 char prop[PROPERTY_VALUE_MAX];
12386
12387 // First try to configure instant AEC from framework metadata
12388 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12389 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12390 }
12391
12392 // If framework did not set this value, try to read from set prop.
12393 if (val == 0) {
12394 memset(prop, 0, sizeof(prop));
12395 property_get("persist.camera.instant.aec", prop, "0");
12396 val = (uint8_t)atoi(prop);
12397 }
12398
12399 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12400 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12401 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12402 mInstantAEC = val;
12403 mInstantAECSettledFrameNumber = 0;
12404 mInstantAecFrameIdxCount = 0;
12405 LOGH("instantAEC value set %d",val);
12406 if (mInstantAEC) {
12407 memset(prop, 0, sizeof(prop));
12408 property_get("persist.camera.ae.instant.bound", prop, "10");
12409 int32_t aec_frame_skip_cnt = atoi(prop);
12410 if (aec_frame_skip_cnt >= 0) {
12411 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12412 } else {
12413 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12414 rc = BAD_VALUE;
12415 }
12416 }
12417 } else {
12418 LOGE("Bad instant aec value set %d", val);
12419 rc = BAD_VALUE;
12420 }
12421 return rc;
12422}
12423
12424/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012425 * FUNCTION : get_num_overall_buffers
12426 *
12427 * DESCRIPTION: Estimate number of pending buffers across all requests.
12428 *
12429 * PARAMETERS : None
12430 *
12431 * RETURN : Number of overall pending buffers
12432 *
12433 *==========================================================================*/
12434uint32_t PendingBuffersMap::get_num_overall_buffers()
12435{
12436 uint32_t sum_buffers = 0;
12437 for (auto &req : mPendingBuffersInRequest) {
12438 sum_buffers += req.mPendingBufferList.size();
12439 }
12440 return sum_buffers;
12441}
12442
/*===========================================================================
 * FUNCTION   : removeBuf
 *
 * DESCRIPTION: Remove a matching buffer from tracker. At most one entry is
 *              removed (the search stops at the first match); a request
 *              whose buffer list becomes empty is removed from the map too.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *
 * RETURN     : None
 *
 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                // erase() returns the next valid iterator; both loops break
                // immediately after, so neither invalidated iterator is
                // ever advanced past this point.
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
12479
12480/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012481 * FUNCTION : getBufErrStatus
12482 *
12483 * DESCRIPTION: get buffer error status
12484 *
12485 * PARAMETERS : @buffer: buffer handle
12486 *
12487 * RETURN : Error status
12488 *
12489 *==========================================================================*/
12490int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12491{
12492 for (auto& req : mPendingBuffersInRequest) {
12493 for (auto& k : req.mPendingBufferList) {
12494 if (k.buffer == buffer)
12495 return k.bufStatus;
12496 }
12497 }
12498 return CAMERA3_BUFFER_STATUS_OK;
12499}
12500
12501/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012502 * FUNCTION : setPAAFSupport
12503 *
12504 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12505 * feature mask according to stream type and filter
12506 * arrangement
12507 *
12508 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12509 * @stream_type: stream type
12510 * @filter_arrangement: filter arrangement
12511 *
12512 * RETURN : None
12513 *==========================================================================*/
12514void QCamera3HardwareInterface::setPAAFSupport(
12515 cam_feature_mask_t& feature_mask,
12516 cam_stream_type_t stream_type,
12517 cam_color_filter_arrangement_t filter_arrangement)
12518{
12519 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12520 feature_mask, stream_type, filter_arrangement);
12521
12522 switch (filter_arrangement) {
12523 case CAM_FILTER_ARRANGEMENT_RGGB:
12524 case CAM_FILTER_ARRANGEMENT_GRBG:
12525 case CAM_FILTER_ARRANGEMENT_GBRG:
12526 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012527 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12528 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012529 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12530 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12531 }
12532 break;
12533 case CAM_FILTER_ARRANGEMENT_Y:
12534 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12535 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12536 }
12537 break;
12538 default:
12539 break;
12540 }
12541}
12542
/*===========================================================================
* FUNCTION   : getSensorMountAngle
*
* DESCRIPTION: Retrieve sensor mount angle
*
* PARAMETERS : None
*
* RETURN     : sensor mount angle in uint32_t
*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    // Straight read from the per-camera capability table.
    return gCamCapability[mCameraId]->sensor_mount_angle;
}
12556
/*===========================================================================
* FUNCTION   : getRelatedCalibrationData
*
* DESCRIPTION: Retrieve related system calibration data
*
* PARAMETERS : None
*
* RETURN     : Pointer of related system calibration data; points into the
*              static capability table (caller must not free).
*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012571
12572/*===========================================================================
12573 * FUNCTION : is60HzZone
12574 *
12575 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
12576 *
12577 * PARAMETERS : None
12578 *
12579 * RETURN : True if in 60Hz zone, False otherwise
12580 *==========================================================================*/
12581bool QCamera3HardwareInterface::is60HzZone()
12582{
12583 time_t t = time(NULL);
12584 struct tm lt;
12585
12586 struct tm* r = localtime_r(&t, &lt);
12587
12588 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
12589 return true;
12590 else
12591 return false;
12592}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070012593
12594/*===========================================================================
12595 * FUNCTION : adjustBlackLevelForCFA
12596 *
12597 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
12598 * of bayer CFA (Color Filter Array).
12599 *
12600 * PARAMETERS : @input: black level pattern in the order of RGGB
12601 * @output: black level pattern in the order of CFA
12602 * @color_arrangement: CFA color arrangement
12603 *
12604 * RETURN : None
12605 *==========================================================================*/
12606template<typename T>
12607void QCamera3HardwareInterface::adjustBlackLevelForCFA(
12608 T input[BLACK_LEVEL_PATTERN_CNT],
12609 T output[BLACK_LEVEL_PATTERN_CNT],
12610 cam_color_filter_arrangement_t color_arrangement)
12611{
12612 switch (color_arrangement) {
12613 case CAM_FILTER_ARRANGEMENT_GRBG:
12614 output[0] = input[1];
12615 output[1] = input[0];
12616 output[2] = input[3];
12617 output[3] = input[2];
12618 break;
12619 case CAM_FILTER_ARRANGEMENT_GBRG:
12620 output[0] = input[2];
12621 output[1] = input[3];
12622 output[2] = input[0];
12623 output[3] = input[1];
12624 break;
12625 case CAM_FILTER_ARRANGEMENT_BGGR:
12626 output[0] = input[3];
12627 output[1] = input[2];
12628 output[2] = input[1];
12629 output[3] = input[0];
12630 break;
12631 case CAM_FILTER_ARRANGEMENT_RGGB:
12632 output[0] = input[0];
12633 output[1] = input[1];
12634 output[2] = input[2];
12635 output[3] = input[3];
12636 break;
12637 default:
12638 LOGE("Invalid color arrangement to derive dynamic blacklevel");
12639 break;
12640 }
12641}
Thierry Strudel3d639192016-09-09 11:52:26 -070012642}; //end namespace qcamera