/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold for detection of missing buffers (in seconds)
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
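/* Note: CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 is intended as the baseline
 * post-processing feature set requested for processed streams; see
 * addToPPFeatureMask() below, which may OR additional features (e.g. SW TNR,
 * LLVD, staggered video HDR) into a stream's postprocess_mask based on
 * system properties, capabilities and the use case. */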
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X   0
#define LEFT_EYE_Y   1
#define RIGHT_EYE_X  2
#define RIGHT_EYE_Y  3
#define MOUTH_X      4
#define MOUTH_Y      5
#define TOTAL_LANDMARK_INDICES 6

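/* These indices address the flattened (x, y) landmark array reported per
 * detected face, i.e. landmarks[LEFT_EYE_X]/landmarks[LEFT_EYE_Y] hold the
 * left-eye coordinates, and so on, for TOTAL_LANDMARK_INDICES values per
 * face when translating to ANDROID_STATISTICS_FACE_LANDMARKS. */
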
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
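
/* The QCameraMap tables below translate between Android camera_metadata enum
 * values and the vendor (cam_*) equivalents. They are plain {framework, HAL}
 * pair arrays that are scanned linearly by the translation helpers elsewhere
 * in this file (e.g. lookupFwkName()/lookupHalName()), so table order matters
 * when several HAL values map to one Android value. */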
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};


const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android
 * the code traverses from lower to higher index, which means that for HAL values that
 * map to different Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
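
/* mCameraOps is the camera3_device_ops vtable handed to the framework. The
 * constructor points mCameraDevice.ops at this table and stores the instance
 * in mCameraDevice.priv, so the static entry points can recover the
 * QCamera3HardwareInterface object from the camera3_device_t they receive. */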

// Initialize session IDs to an invalid default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : camera module callbacks used to notify the framework
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal control */
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
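
/* The persist.camera.* keys read above are debug/tuning knobs; on a
 * userdebug/eng build they can typically be toggled from the host before
 * opening the camera, for example:
 *   adb shell setprop persist.camera.raw.dump 1
 *   adb shell setprop persist.camera.tnr.video 1
 * (illustrative usage only; the accepted values depend on the target). */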

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}
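
/* Note: CAM_EVENT_TYPE_DAEMON_DIED is fatal. The handler above only flags
 * mState = ERROR; outstanding and subsequent framework calls are expected to
 * observe that state (see the mState checks in the entry points below) and
 * fail or notify the framework accordingly. */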

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}
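
/* The heap allocated above backs a single cam_dual_camera_cmd_info_t that is
 * shared with the camera daemon via map_buf(); the same buffer is later
 * reused for the unlink command (CAM_SYNC_RELATED_SENSORS_OFF) issued from
 * the destructor before the session is closed. */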

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Disconnect from HDR+ client.
    if (mHdrPlusClient != nullptr) {
        mHdrPlusClient->disconnect();
        mHdrPlusClient = nullptr;
    }

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);
    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback functions to the framework
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
            case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
                count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                            (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
                /* Verify set size against generated sizes table */
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            default:
                if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                        || newStream->stream_type == CAMERA3_STREAM_INPUT
                        || IS_USAGE_ZSL(newStream->usage)) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->active_array_size.width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->active_array_size.height)) {
                        sizeFound = true;
                        break;
                    }
                    /* We could potentially break here to enforce that a ZSL stream
                     * set from the framework is always full active array size, but it
                     * is not clear from the spec whether the framework will always
                     * follow that. We also have logic to override to full array size,
                     * so keep the logic lenient for the moment.
                     */
                }
                count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                        MAX_SIZES_CNT);
                for (size_t i = 0; i < count; i++) {
                    if (((int32_t)rotatedWidth ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                            ((int32_t)rotatedHeight ==
                            gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                        sizeFound = true;
                        break;
                    }
                }
                break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
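
/* validateStreamDimensions() notes: for 90/270-degree stream rotation the
 * requested width/height are swapped before the lookup, so e.g. a 1080x1920
 * request with ROTATION_90 is validated against the advertised 1920x1080
 * entry; ZSL/input streams may also match the full active array size. */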

/*==============================================================================
 * FUNCTION   : isSupportChannelNeeded
 *
 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
 *
 * PARAMETERS :
 *   @stream_list        : streams to be configured
 *   @stream_config_info : the config info for streams to be configured
 *
 * RETURN     : Boolean true/false decision
 *
 *==========================================================================*/
bool QCamera3HardwareInterface::isSupportChannelNeeded(
        camera3_stream_configuration_t *streamList,
        cam_stream_size_info_t stream_config_info)
{
    uint32_t i;
    bool pprocRequested = false;
    /* Check for conditions where PProc pipeline does not have any streams */
    for (i = 0; i < stream_config_info.num_streams; i++) {
        if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
                stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
            pprocRequested = true;
            break;
        }
    }

    if (pprocRequested == false )
        return true;

    /* Dummy stream needed if only raw or jpeg streams present */
    for (i = 0; i < streamList->num_streams; i++) {
        switch(streamList->streams[i]->format) {
            case HAL_PIXEL_FORMAT_RAW_OPAQUE:
            case HAL_PIXEL_FORMAT_RAW10:
            case HAL_PIXEL_FORMAT_RAW16:
            case HAL_PIXEL_FORMAT_BLOB:
                break;
            default:
                return false;
        }
    }
    return true;
}
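
/* Summary of the heuristic above: a support (dummy) channel is reported as
 * needed when either no configured stream exercises the post-processing
 * pipeline, or every requested stream is RAW/BLOB only; otherwise the regular
 * processed streams can drive the pipeline themselves. */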

/*==============================================================================
 * FUNCTION   : getSensorModeInfo
 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
 *
 * PARAMETERS :
 *   @sensor_mode_info : sensor mode information (output)
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u", __FUNCTION__,
            sensorModeInfo.active_array_size.width, sensorModeInfo.active_array_size.height,
            sensorModeInfo.pixel_array_size.width, sensorModeInfo.pixel_array_size.height,
            sensorModeInfo.op_pixel_clk);

    return rc;
}
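
/* getSensorModeInfo() is therefore a two-step query: it first pushes the
 * largest configured stream dimension as CAM_INTF_PARM_MAX_DIMENSION so the
 * backend can select a sensor mode, then reads the resulting
 * CAM_INTF_PARM_SENSOR_MODE_INFO (active/pixel array sizes and pixel clock)
 * back out of the same parameter buffer. */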

/*==============================================================================
 * FUNCTION   : addToPPFeatureMask
 *
 * DESCRIPTION: add additional features to pp feature mask based on
 *              stream type and usecase
 *
 * PARAMETERS :
 *   @stream_format : stream type for feature mask
 *   @stream_idx    : stream idx within postprocess_mask list to change
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
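
/* persist.camera.hal3.feature is parsed either as hex (with a "0x" prefix) or
 * as a decimal cam_feature_mask_t value. For example, to request SW TNR on
 * video streams one could set the property to the hex value of
 * CAM_QTI_FEATURE_SW_TNR, e.g.:
 *   adb shell setprop persist.camera.hal3.feature 0x<mask>
 * (illustrative only; the exact bit values come from the feature-mask
 * definitions in cam_types.h). */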

/*==============================================================================
 * FUNCTION   : updateFpsInPreviewBuffer
 *
 * DESCRIPTION: update FPS information in preview buffer.
 *
 * PARAMETERS :
 *   @metadata    : pointer to metadata buffer
 *   @frame_number: frame_number to look for in pending buffer list
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            if ((req->frame_number == frame_number) &&
                    (channel->getStreamTypeMask() &
                    (1U << CAM_STREAM_TYPE_PREVIEW))) {
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                        CAM_INTF_PARM_FPS_RANGE, metadata) {
                    typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                            (struct private_handle_t *)(*(j->buffer));
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
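
/* Both this helper and updateTimeStampInPendingBuffers() below write
 * per-buffer display metadata through gralloc's setMetaData() (qdMetaData):
 * UPDATE_REFRESH_RATE lets the display pace preview at the capture frame
 * rate, while SET_VT_TIMESTAMP stamps all pending buffers of a request with
 * the caller-provided timestamp. */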

/*==============================================================================
 * FUNCTION   : updateTimeStampInPendingBuffers
 *
 * DESCRIPTION: update timestamp in display metadata for all pending buffers
 *              of a frame number
 *
 * PARAMETERS :
 *   @frame_number : frame_number. Timestamp will be set on pending buffers of this frame number
 *   @timestamp    : timestamp to be set
 *
 * RETURN     : None
 *
 *==========================================================================*/
void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
        uint32_t frameNumber, nsecs_t timestamp)
{
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        if (req->frame_number != frameNumber)
            continue;

        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            struct private_handle_t *priv_handle =
                    (struct private_handle_t *) (*(k->buffer));
            setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
        }
    }
    return;
}
1385
Thierry Strudel3d639192016-09-09 11:52:26 -07001386/*===========================================================================
1387 * FUNCTION : configureStreams
1388 *
1389 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1390 * and output streams.
1391 *
1392 * PARAMETERS :
1393 * @stream_list : streams to be configured
1394 *
1395 * RETURN :
1396 *
1397 *==========================================================================*/
1398int QCamera3HardwareInterface::configureStreams(
1399 camera3_stream_configuration_t *streamList)
1400{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001401 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001402 int rc = 0;
1403
1404 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001405 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001406 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001407 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001408
1409 return rc;
1410}
1411
1412/*===========================================================================
1413 * FUNCTION : configureStreamsPerfLocked
1414 *
1415 * DESCRIPTION: configureStreams while perfLock is held.
1416 *
1417 * PARAMETERS :
1418 * @stream_list : streams to be configured
1419 *
1420 * RETURN : int32_t type of status
1421 * NO_ERROR -- success
1422 * none-zero failure code
1423 *==========================================================================*/
1424int QCamera3HardwareInterface::configureStreamsPerfLocked(
1425 camera3_stream_configuration_t *streamList)
1426{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001427 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001428 int rc = 0;
1429
1430 // Sanity check stream_list
1431 if (streamList == NULL) {
1432 LOGE("NULL stream configuration");
1433 return BAD_VALUE;
1434 }
1435 if (streamList->streams == NULL) {
1436 LOGE("NULL stream list");
1437 return BAD_VALUE;
1438 }
1439
1440 if (streamList->num_streams < 1) {
1441 LOGE("Bad number of streams requested: %d",
1442 streamList->num_streams);
1443 return BAD_VALUE;
1444 }
1445
1446 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1447 LOGE("Maximum number of streams %d exceeded: %d",
1448 MAX_NUM_STREAMS, streamList->num_streams);
1449 return BAD_VALUE;
1450 }
1451
1452 mOpMode = streamList->operation_mode;
1453 LOGD("mOpMode: %d", mOpMode);
1454
1455 /* first invalidate all the streams in mStreamInfo
1456 * if they appear again, they will be validated */
1457 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1458 it != mStreamInfo.end(); it++) {
1459 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1460 if (channel) {
1461 channel->stop();
1462 }
1463 (*it)->status = INVALID;
1464 }
1465
1466 if (mRawDumpChannel) {
1467 mRawDumpChannel->stop();
1468 delete mRawDumpChannel;
1469 mRawDumpChannel = NULL;
1470 }
1471
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001472 if (mHdrPlusRawSrcChannel) {
1473 mHdrPlusRawSrcChannel->stop();
1474 delete mHdrPlusRawSrcChannel;
1475 mHdrPlusRawSrcChannel = NULL;
1476 }
1477
Thierry Strudel3d639192016-09-09 11:52:26 -07001478 if (mSupportChannel)
1479 mSupportChannel->stop();
1480
1481 if (mAnalysisChannel) {
1482 mAnalysisChannel->stop();
1483 }
1484 if (mMetadataChannel) {
1485 /* If mStreamInfo is not empty, a metadata stream exists and must be stopped */
1486 mMetadataChannel->stop();
1487 }
1488 if (mChannelHandle) {
1489 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1490 mChannelHandle);
1491 LOGD("stopping channel %d", mChannelHandle);
1492 }
1493
1494 pthread_mutex_lock(&mMutex);
1495
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001496 // Check if HDR+ is enabled.
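    // HDR+ is gated on the persist.camera.hdrplus system property read below. For local
    // experiments it can usually be toggled with `adb shell setprop persist.camera.hdrplus 1`
    // before reconfiguring streams (assumption: standard Android property behavior, nothing
    // HAL-specific beyond the property name used here).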
1497 char prop[PROPERTY_VALUE_MAX];
1498 property_get("persist.camera.hdrplus", prop, "0");
1499 bool enableHdrPlus = atoi(prop);
1500 if (enableHdrPlus) {
1501 ALOGD("%s: HDR+ in Camera HAL enabled.", __FUNCTION__);
1502 // Connect to HDR+ client if not yet.
1503 if (mHdrPlusClient == nullptr) {
1504 mHdrPlusClient = std::make_shared<HdrPlusClient>();
1505 rc = mHdrPlusClient->connect(this);
1506 if (rc < 0) {
1507 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
1508 strerror(-rc), rc);
1509 pthread_mutex_unlock(&mMutex);
1510 return -ENODEV;
1511 }
1512
1513 // Set static metadata.
1514 rc = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
1515 if (rc < 0) {
1516 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
1517 strerror(-rc), rc);
1518 pthread_mutex_unlock(&mMutex);
1519 return -ENODEV;
1520 }
1521 }
1522 } else {
1523 ALOGD("%s: HDR+ in Camera HAL disabled.", __FUNCTION__);
1524 // Disconnect from HDR+ client if HDR+ is not enabled.
1525 if (mHdrPlusClient != nullptr) {
1526 mHdrPlusClient->disconnect();
1527 mHdrPlusClient = nullptr;
1528 }
1529 }
1530
Thierry Strudel3d639192016-09-09 11:52:26 -07001531 // Check state
1532 switch (mState) {
1533 case INITIALIZED:
1534 case CONFIGURED:
1535 case STARTED:
1536 /* valid state */
1537 break;
1538 default:
1539 LOGE("Invalid state %d", mState);
1540 pthread_mutex_unlock(&mMutex);
1541 return -ENODEV;
1542 }
1543
1544 /* Check whether we have video stream */
1545 m_bIs4KVideo = false;
1546 m_bIsVideo = false;
1547 m_bEisSupportedSize = false;
1548 m_bTnrEnabled = false;
1549 bool isZsl = false;
1550 uint32_t videoWidth = 0U;
1551 uint32_t videoHeight = 0U;
1552 size_t rawStreamCnt = 0;
1553 size_t stallStreamCnt = 0;
1554 size_t processedStreamCnt = 0;
1555 // Number of streams on ISP encoder path
1556 size_t numStreamsOnEncoder = 0;
1557 size_t numYuv888OnEncoder = 0;
1558 bool bYuv888OverrideJpeg = false;
1559 cam_dimension_t largeYuv888Size = {0, 0};
1560 cam_dimension_t maxViewfinderSize = {0, 0};
1561 bool bJpegExceeds4K = false;
1562 bool bJpegOnEncoder = false;
1563 bool bUseCommonFeatureMask = false;
1564 cam_feature_mask_t commonFeatureMask = 0;
1565 bool bSmallJpegSize = false;
1566 uint32_t width_ratio;
1567 uint32_t height_ratio;
1568 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1569 camera3_stream_t *inputStream = NULL;
1570 bool isJpeg = false;
1571 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001572 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001573
1574 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1575
1576 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001577 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001578 uint8_t eis_prop_set;
1579 uint32_t maxEisWidth = 0;
1580 uint32_t maxEisHeight = 0;
1581
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001582 // Initialize all instant AEC related variables
1583 mInstantAEC = false;
1584 mResetInstantAEC = false;
1585 mInstantAECSettledFrameNumber = 0;
1586 mAecSkipDisplayFrameBound = 0;
1587 mInstantAecFrameIdxCount = 0;
1588
Thierry Strudel3d639192016-09-09 11:52:26 -07001589 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1590
1591 size_t count = IS_TYPE_MAX;
1592 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1593 for (size_t i = 0; i < count; i++) {
1594 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001595 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1596 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001597 break;
1598 }
1599 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001600 count = CAM_OPT_STAB_MAX;
1601 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1602 for (size_t i = 0; i < count; i++) {
1603 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1604 oisSupported = true;
1605 break;
1606 }
1607 }
1608
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001609 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001610 maxEisWidth = MAX_EIS_WIDTH;
1611 maxEisHeight = MAX_EIS_HEIGHT;
1612 }
1613
1614 /* EIS setprop control */
1615 char eis_prop[PROPERTY_VALUE_MAX];
1616 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001617 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001618 eis_prop_set = (uint8_t)atoi(eis_prop);
1619
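    // EIS ends up enabled only when every condition below holds: the persist.camera.eis.enable
    // property allows it, the sensor supports EIS (2.0/3.0) but has no OIS, and the session is
    // not a constrained high-speed (HFR) configuration.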
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001620 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001621 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1622
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001623 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1624 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1625
Thierry Strudel3d639192016-09-09 11:52:26 -07001626 /* stream configurations */
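    /* This first pass only classifies the requested streams: it counts stall/raw/processed
     * streams, notes which outputs land on the ISP encoder path (larger than the max viewfinder
     * size), detects 4K video and EIS-compatible sizes, and accumulates the common feature mask.
     * Channels are created in a later pass, after the overall configuration is validated. */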
1627 for (size_t i = 0; i < streamList->num_streams; i++) {
1628 camera3_stream_t *newStream = streamList->streams[i];
1629 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1630 "height = %d, rotation = %d, usage = 0x%x",
1631 i, newStream->stream_type, newStream->format,
1632 newStream->width, newStream->height, newStream->rotation,
1633 newStream->usage);
1634 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1635 newStream->stream_type == CAMERA3_STREAM_INPUT){
1636 isZsl = true;
1637 }
1638 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1639 inputStream = newStream;
1640 }
1641
1642 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1643 isJpeg = true;
1644 jpegSize.width = newStream->width;
1645 jpegSize.height = newStream->height;
1646 if (newStream->width > VIDEO_4K_WIDTH ||
1647 newStream->height > VIDEO_4K_HEIGHT)
1648 bJpegExceeds4K = true;
1649 }
1650
1651 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1652 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1653 m_bIsVideo = true;
1654 videoWidth = newStream->width;
1655 videoHeight = newStream->height;
1656 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1657 (VIDEO_4K_HEIGHT <= newStream->height)) {
1658 m_bIs4KVideo = true;
1659 }
1660 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1661 (newStream->height <= maxEisHeight);
1662 }
1663 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1664 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1665 switch (newStream->format) {
1666 case HAL_PIXEL_FORMAT_BLOB:
1667 stallStreamCnt++;
1668 if (isOnEncoder(maxViewfinderSize, newStream->width,
1669 newStream->height)) {
1670 numStreamsOnEncoder++;
1671 bJpegOnEncoder = true;
1672 }
1673 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1674 newStream->width);
1675 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1676 newStream->height);
1677 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1678 "FATAL: max_downscale_factor cannot be zero and so assert");
1679 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1680 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1681 LOGH("Setting small jpeg size flag to true");
1682 bSmallJpegSize = true;
1683 }
1684 break;
1685 case HAL_PIXEL_FORMAT_RAW10:
1686 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1687 case HAL_PIXEL_FORMAT_RAW16:
1688 rawStreamCnt++;
1689 break;
1690 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1691 processedStreamCnt++;
1692 if (isOnEncoder(maxViewfinderSize, newStream->width,
1693 newStream->height)) {
1694 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1695 !IS_USAGE_ZSL(newStream->usage)) {
1696 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1697 }
1698 numStreamsOnEncoder++;
1699 }
1700 break;
1701 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1702 processedStreamCnt++;
1703 if (isOnEncoder(maxViewfinderSize, newStream->width,
1704 newStream->height)) {
1705 // If Yuv888 size is not greater than 4K, set feature mask
1706 // to SUPERSET so that it supports concurrent requests on
1707 // YUV and JPEG.
1708 if (newStream->width <= VIDEO_4K_WIDTH &&
1709 newStream->height <= VIDEO_4K_HEIGHT) {
1710 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1711 }
1712 numStreamsOnEncoder++;
1713 numYuv888OnEncoder++;
1714 largeYuv888Size.width = newStream->width;
1715 largeYuv888Size.height = newStream->height;
1716 }
1717 break;
1718 default:
1719 processedStreamCnt++;
1720 if (isOnEncoder(maxViewfinderSize, newStream->width,
1721 newStream->height)) {
1722 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1723 numStreamsOnEncoder++;
1724 }
1725 break;
1726 }
1727
1728 }
1729 }
1730
1731 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1732 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1733 !m_bIsVideo) {
1734 m_bEisEnable = false;
1735 }
1736
1737 /* Logic to enable/disable TNR based on specific config size/etc.*/
1738 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1739 ((videoWidth == 1920 && videoHeight == 1080) ||
1740 (videoWidth == 1280 && videoHeight == 720)) &&
1741 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1742 m_bTnrEnabled = true;
1743
1744 /* Check if num_streams is sane */
1745 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1746 rawStreamCnt > MAX_RAW_STREAMS ||
1747 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1748 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1749 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1750 pthread_mutex_unlock(&mMutex);
1751 return -EINVAL;
1752 }
1753 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001754 if (isZsl && m_bIs4KVideo) {
1755 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001756 pthread_mutex_unlock(&mMutex);
1757 return -EINVAL;
1758 }
1759 /* Check if stream sizes are sane */
1760 if (numStreamsOnEncoder > 2) {
1761 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1762 pthread_mutex_unlock(&mMutex);
1763 return -EINVAL;
1764 } else if (1 < numStreamsOnEncoder){
1765 bUseCommonFeatureMask = true;
1766 LOGH("Multiple streams above max viewfinder size, common mask needed");
1767 }
1768
1769 /* Check if BLOB size is greater than 4k in 4k recording case */
1770 if (m_bIs4KVideo && bJpegExceeds4K) {
1771 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1772 pthread_mutex_unlock(&mMutex);
1773 return -EINVAL;
1774 }
1775
1776 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1777 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1778 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1779 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1780 // configurations:
1781 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1782 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1783 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1784 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1785 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1786 __func__);
1787 pthread_mutex_unlock(&mMutex);
1788 return -EINVAL;
1789 }
1790
1791 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1792 // the YUV stream's size is greater or equal to the JPEG size, set common
1793 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1794 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1795 jpegSize.width, jpegSize.height) &&
1796 largeYuv888Size.width > jpegSize.width &&
1797 largeYuv888Size.height > jpegSize.height) {
1798 bYuv888OverrideJpeg = true;
1799 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1800 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1801 }
1802
1803 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1804 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1805 commonFeatureMask);
1806 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1807 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1808
1809 rc = validateStreamDimensions(streamList);
1810 if (rc == NO_ERROR) {
1811 rc = validateStreamRotations(streamList);
1812 }
1813 if (rc != NO_ERROR) {
1814 LOGE("Invalid stream configuration requested!");
1815 pthread_mutex_unlock(&mMutex);
1816 return rc;
1817 }
1818
1819 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1820 for (size_t i = 0; i < streamList->num_streams; i++) {
1821 camera3_stream_t *newStream = streamList->streams[i];
1822 LOGH("newStream type = %d, stream format = %d "
1823 "stream size : %d x %d, stream rotation = %d",
1824 newStream->stream_type, newStream->format,
1825 newStream->width, newStream->height, newStream->rotation);
1826 //if the stream is in the mStreamList validate it
1827 bool stream_exists = false;
1828 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1829 it != mStreamInfo.end(); it++) {
1830 if ((*it)->stream == newStream) {
1831 QCamera3ProcessingChannel *channel =
1832 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1833 stream_exists = true;
1834 if (channel)
1835 delete channel;
1836 (*it)->status = VALID;
1837 (*it)->stream->priv = NULL;
1838 (*it)->channel = NULL;
1839 }
1840 }
1841 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1842 //new stream
1843 stream_info_t* stream_info;
1844 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1845 if (!stream_info) {
1846 LOGE("Could not allocate stream info");
1847 rc = -ENOMEM;
1848 pthread_mutex_unlock(&mMutex);
1849 return rc;
1850 }
1851 stream_info->stream = newStream;
1852 stream_info->status = VALID;
1853 stream_info->channel = NULL;
1854 mStreamInfo.push_back(stream_info);
1855 }
1856 /* Covers Opaque ZSL and API1 F/W ZSL */
1857 if (IS_USAGE_ZSL(newStream->usage)
1858 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1859 if (zslStream != NULL) {
1860 LOGE("Multiple input/reprocess streams requested!");
1861 pthread_mutex_unlock(&mMutex);
1862 return BAD_VALUE;
1863 }
1864 zslStream = newStream;
1865 }
1866 /* Covers YUV reprocess */
1867 if (inputStream != NULL) {
1868 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1869 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1870 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1871 && inputStream->width == newStream->width
1872 && inputStream->height == newStream->height) {
1873 if (zslStream != NULL) {
1874 /* This scenario indicates multiple YUV streams with same size
1875 * as input stream have been requested, since zsl stream handle
1876 * is solely used for the purpose of overriding the size of streams
1877 * which share h/w streams we will just make a guess here as to
1878 * which of the streams is a ZSL stream; this will be refactored
1879 * once we make generic logic for streams sharing encoder output
1880 */
1881 LOGH("Warning, Multiple ip/reprocess streams requested!");
1882 }
1883 zslStream = newStream;
1884 }
1885 }
1886 }
1887
1888 /* If a zsl stream is set, we know that we have configured at least one input or
1889 bidirectional stream */
1890 if (NULL != zslStream) {
1891 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1892 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1893 mInputStreamInfo.format = zslStream->format;
1894 mInputStreamInfo.usage = zslStream->usage;
1895 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1896 mInputStreamInfo.dim.width,
1897 mInputStreamInfo.dim.height,
1898 mInputStreamInfo.format, mInputStreamInfo.usage);
1899 }
1900
1901 cleanAndSortStreamInfo();
1902 if (mMetadataChannel) {
1903 delete mMetadataChannel;
1904 mMetadataChannel = NULL;
1905 }
1906 if (mSupportChannel) {
1907 delete mSupportChannel;
1908 mSupportChannel = NULL;
1909 }
1910
1911 if (mAnalysisChannel) {
1912 delete mAnalysisChannel;
1913 mAnalysisChannel = NULL;
1914 }
1915
1916 if (mDummyBatchChannel) {
1917 delete mDummyBatchChannel;
1918 mDummyBatchChannel = NULL;
1919 }
1920
1921 //Create metadata channel and initialize it
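    //The metadata channel is recreated unconditionally: it delivers the per-frame metadata
    //buffers consumed through captureResultCb, which every stream configuration depends on.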
1922 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1923 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1924 gCamCapability[mCameraId]->color_arrangement);
1925 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1926 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001927 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001928 if (mMetadataChannel == NULL) {
1929 LOGE("failed to allocate metadata channel");
1930 rc = -ENOMEM;
1931 pthread_mutex_unlock(&mMutex);
1932 return rc;
1933 }
1934 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1935 if (rc < 0) {
1936 LOGE("metadata channel initialization failed");
1937 delete mMetadataChannel;
1938 mMetadataChannel = NULL;
1939 pthread_mutex_unlock(&mMutex);
1940 return rc;
1941 }
1942
Thierry Strudel3d639192016-09-09 11:52:26 -07001943 bool isRawStreamRequested = false;
1944 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1945 /* Allocate channel objects for the requested streams */
1946 for (size_t i = 0; i < streamList->num_streams; i++) {
1947 camera3_stream_t *newStream = streamList->streams[i];
1948 uint32_t stream_usage = newStream->usage;
1949 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1950 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1951 struct camera_info *p_info = NULL;
1952 pthread_mutex_lock(&gCamLock);
1953 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1954 pthread_mutex_unlock(&gCamLock);
1955 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1956 || IS_USAGE_ZSL(newStream->usage)) &&
1957 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1958 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1959 if (bUseCommonFeatureMask) {
1960 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1961 commonFeatureMask;
1962 } else {
1963 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1964 CAM_QCOM_FEATURE_NONE;
1965 }
1966
1967 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1968 LOGH("Input stream configured, reprocess config");
1969 } else {
1970 //for non zsl streams find out the format
1971 switch (newStream->format) {
1972 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1973 {
1974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1976 /* add additional features to pp feature mask */
1977 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1978 mStreamConfigInfo.num_streams);
1979
1980 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1981 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1982 CAM_STREAM_TYPE_VIDEO;
1983 if (m_bTnrEnabled && m_bTnrVideo) {
1984 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1985 CAM_QCOM_FEATURE_CPP_TNR;
1986 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1987 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1988 ~CAM_QCOM_FEATURE_CDS;
1989 }
1990 } else {
1991 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1992 CAM_STREAM_TYPE_PREVIEW;
1993 if (m_bTnrEnabled && m_bTnrPreview) {
1994 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1995 CAM_QCOM_FEATURE_CPP_TNR;
1996 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1997 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1998 ~CAM_QCOM_FEATURE_CDS;
1999 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002000 if(!m_bSwTnrPreview) {
2001 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2002 ~CAM_QTI_FEATURE_SW_TNR;
2003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002004 padding_info.width_padding = mSurfaceStridePadding;
2005 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002006 previewSize.width = (int32_t)newStream->width;
2007 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 }
2009 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2010 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2011 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2012 newStream->height;
2013 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2014 newStream->width;
2015 }
2016 }
2017 break;
2018 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2019 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2020 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2021 if (bUseCommonFeatureMask)
2022 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2023 commonFeatureMask;
2024 else
2025 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2026 CAM_QCOM_FEATURE_NONE;
2027 } else {
2028 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2029 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2030 }
2031 break;
2032 case HAL_PIXEL_FORMAT_BLOB:
2033 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2034 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2035 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2036 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2037 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2038 } else {
2039 if (bUseCommonFeatureMask &&
2040 isOnEncoder(maxViewfinderSize, newStream->width,
2041 newStream->height)) {
2042 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2043 } else {
2044 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2045 }
2046 }
2047 if (isZsl) {
2048 if (zslStream) {
2049 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2050 (int32_t)zslStream->width;
2051 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2052 (int32_t)zslStream->height;
2053 } else {
2054 LOGE("Error, No ZSL stream identified");
2055 pthread_mutex_unlock(&mMutex);
2056 return -EINVAL;
2057 }
2058 } else if (m_bIs4KVideo) {
2059 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2060 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2061 } else if (bYuv888OverrideJpeg) {
2062 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2063 (int32_t)largeYuv888Size.width;
2064 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2065 (int32_t)largeYuv888Size.height;
2066 }
2067 break;
2068 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2069 case HAL_PIXEL_FORMAT_RAW16:
2070 case HAL_PIXEL_FORMAT_RAW10:
2071 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2072 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2073 isRawStreamRequested = true;
2074 break;
2075 default:
2076 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2077 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2078 break;
2079 }
2080 }
2081
2082 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2083 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2084 gCamCapability[mCameraId]->color_arrangement);
2085
2086 if (newStream->priv == NULL) {
2087 //New stream, construct channel
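            //Before constructing the channel, OR in the gralloc usage flags the HAL needs on
            //this stream's buffers, based on the stream direction and consumer (see switch below).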
2088 switch (newStream->stream_type) {
2089 case CAMERA3_STREAM_INPUT:
2090 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2091 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //write access for in-place algorithms
2092 break;
2093 case CAMERA3_STREAM_BIDIRECTIONAL:
2094 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2095 GRALLOC_USAGE_HW_CAMERA_WRITE;
2096 break;
2097 case CAMERA3_STREAM_OUTPUT:
2098 /* For video encoding stream, set read/write rarely
2099 * flags so that the buffers may be allocated un-cached */
2100 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2101 newStream->usage |=
2102 (GRALLOC_USAGE_SW_READ_RARELY |
2103 GRALLOC_USAGE_SW_WRITE_RARELY |
2104 GRALLOC_USAGE_HW_CAMERA_WRITE);
2105 else if (IS_USAGE_ZSL(newStream->usage))
2106 {
2107 LOGD("ZSL usage flag skipping");
2108 }
2109 else if (newStream == zslStream
2110 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2111 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2112 } else
2113 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2114 break;
2115 default:
2116 LOGE("Invalid stream_type %d", newStream->stream_type);
2117 break;
2118 }
2119
2120 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2121 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2122 QCamera3ProcessingChannel *channel = NULL;
2123 switch (newStream->format) {
2124 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2125 if ((newStream->usage &
2126 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2127 (streamList->operation_mode ==
2128 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2129 ) {
2130 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2131 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002132 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002133 this,
2134 newStream,
2135 (cam_stream_type_t)
2136 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2137 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2138 mMetadataChannel,
2139 0); //heap buffers are not required for HFR video channel
2140 if (channel == NULL) {
2141 LOGE("allocation of channel failed");
2142 pthread_mutex_unlock(&mMutex);
2143 return -ENOMEM;
2144 }
2145 //channel->getNumBuffers() will return 0 here so use
2146 //MAX_INFLIGHT_HFR_REQUESTS
2147 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2148 newStream->priv = channel;
2149 LOGI("num video buffers in HFR mode: %d",
2150 MAX_INFLIGHT_HFR_REQUESTS);
2151 } else {
2152 /* Copy stream contents in HFR preview only case to create
2153 * dummy batch channel so that sensor streaming is in
2154 * HFR mode */
2155 if (!m_bIsVideo && (streamList->operation_mode ==
2156 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2157 mDummyBatchStream = *newStream;
2158 }
2159 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2160 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002161 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002162 this,
2163 newStream,
2164 (cam_stream_type_t)
2165 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2166 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2167 mMetadataChannel,
2168 MAX_INFLIGHT_REQUESTS);
2169 if (channel == NULL) {
2170 LOGE("allocation of channel failed");
2171 pthread_mutex_unlock(&mMutex);
2172 return -ENOMEM;
2173 }
2174 newStream->max_buffers = channel->getNumBuffers();
2175 newStream->priv = channel;
2176 }
2177 break;
2178 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2179 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2180 mChannelHandle,
2181 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002182 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002183 this,
2184 newStream,
2185 (cam_stream_type_t)
2186 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2187 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2188 mMetadataChannel);
2189 if (channel == NULL) {
2190 LOGE("allocation of YUV channel failed");
2191 pthread_mutex_unlock(&mMutex);
2192 return -ENOMEM;
2193 }
2194 newStream->max_buffers = channel->getNumBuffers();
2195 newStream->priv = channel;
2196 break;
2197 }
2198 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2199 case HAL_PIXEL_FORMAT_RAW16:
2200 case HAL_PIXEL_FORMAT_RAW10:
2201 mRawChannel = new QCamera3RawChannel(
2202 mCameraHandle->camera_handle, mChannelHandle,
2203 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002204 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002205 this, newStream,
2206 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2207 mMetadataChannel,
2208 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2209 if (mRawChannel == NULL) {
2210 LOGE("allocation of raw channel failed");
2211 pthread_mutex_unlock(&mMutex);
2212 return -ENOMEM;
2213 }
2214 newStream->max_buffers = mRawChannel->getNumBuffers();
2215 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2216 break;
2217 case HAL_PIXEL_FORMAT_BLOB:
2218 // Max live snapshot inflight buffer is 1. This is to mitigate
2219 // frame drop issues for video snapshot. The more buffers being
2220 // allocated, the more frame drops there are.
2221 mPictureChannel = new QCamera3PicChannel(
2222 mCameraHandle->camera_handle, mChannelHandle,
2223 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002224 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002225 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2226 m_bIs4KVideo, isZsl, mMetadataChannel,
2227 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2228 if (mPictureChannel == NULL) {
2229 LOGE("allocation of channel failed");
2230 pthread_mutex_unlock(&mMutex);
2231 return -ENOMEM;
2232 }
2233 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2234 newStream->max_buffers = mPictureChannel->getNumBuffers();
2235 mPictureChannel->overrideYuvSize(
2236 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2237 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2238 break;
2239
2240 default:
2241 LOGE("not a supported format 0x%x", newStream->format);
2242 break;
2243 }
2244 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2245 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2246 } else {
2247 LOGE("Error, Unknown stream type");
2248 pthread_mutex_unlock(&mMutex);
2249 return -EINVAL;
2250 }
2251
2252 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2253 if (channel != NULL && channel->isUBWCEnabled()) {
2254 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002255 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2256 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002257 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2258 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2259 }
2260 }
2261
2262 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2263 it != mStreamInfo.end(); it++) {
2264 if ((*it)->stream == newStream) {
2265 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2266 break;
2267 }
2268 }
2269 } else {
2270 // Channel already exists for this stream
2271 // Do nothing for now
2272 }
2273 padding_info = gCamCapability[mCameraId]->padding_info;
2274
2275 /* Do not add entries for input stream in metastream info
2276 * since there is no real stream associated with it
2277 */
2278 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2279 mStreamConfigInfo.num_streams++;
2280 }
2281
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002282 // Create analysis stream all the time, even when h/w support is not available
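    // (The analysis stream is an internal-only stream, typically used for face detection and
    // other statistics; its dimension is matched to the preview size via getMatchingDimension()
    // below and its buffer count is forced to 0.)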
2283 {
2284 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2285 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2286 gCamCapability[mCameraId]->color_arrangement);
2287 cam_analysis_info_t analysisInfo;
2288 int32_t ret = NO_ERROR;
2289 ret = mCommon.getAnalysisInfo(
2290 FALSE,
2291 analysisFeatureMask,
2292 &analysisInfo);
2293 if (ret == NO_ERROR) {
2294 cam_dimension_t analysisDim;
2295 analysisDim = mCommon.getMatchingDimension(previewSize,
2296 analysisInfo.analysis_recommended_res);
2297
2298 mAnalysisChannel = new QCamera3SupportChannel(
2299 mCameraHandle->camera_handle,
2300 mChannelHandle,
2301 mCameraHandle->ops,
2302 &analysisInfo.analysis_padding_info,
2303 analysisFeatureMask,
2304 CAM_STREAM_TYPE_ANALYSIS,
2305 &analysisDim,
2306 (analysisInfo.analysis_format
2307 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2308 : CAM_FORMAT_YUV_420_NV21),
2309 analysisInfo.hw_analysis_supported,
2310 gCamCapability[mCameraId]->color_arrangement,
2311 this,
2312 0); // force buffer count to 0
2313 } else {
2314 LOGW("getAnalysisInfo failed, ret = %d", ret);
2315 }
2316 if (!mAnalysisChannel) {
2317 LOGW("Analysis channel cannot be created");
2318 }
2319 }
2320
Thierry Strudel3d639192016-09-09 11:52:26 -07002321 //RAW DUMP channel
2322 if (mEnableRawDump && isRawStreamRequested == false){
2323 cam_dimension_t rawDumpSize;
2324 rawDumpSize = getMaxRawSize(mCameraId);
2325 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2326 setPAAFSupport(rawDumpFeatureMask,
2327 CAM_STREAM_TYPE_RAW,
2328 gCamCapability[mCameraId]->color_arrangement);
2329 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2330 mChannelHandle,
2331 mCameraHandle->ops,
2332 rawDumpSize,
2333 &padding_info,
2334 this, rawDumpFeatureMask);
2335 if (!mRawDumpChannel) {
2336 LOGE("Raw Dump channel cannot be created");
2337 pthread_mutex_unlock(&mMutex);
2338 return -ENOMEM;
2339 }
2340 }
2341
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002342 // Initialize HDR+ Raw Source channel.
2343 if (mHdrPlusClient != nullptr) {
2344 if (isRawStreamRequested || mRawDumpChannel) {
2345 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported.",
2346 __FUNCTION__);
2347 mHdrPlusClient->disconnect();
2348 mHdrPlusClient = nullptr;
2349 } else {
2350 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2351 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2352 setPAAFSupport(hdrPlusRawFeatureMask,
2353 CAM_STREAM_TYPE_RAW,
2354 gCamCapability[mCameraId]->color_arrangement);
2355 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2356 mChannelHandle,
2357 mCameraHandle->ops,
2358 rawSize,
2359 &padding_info,
2360 this, hdrPlusRawFeatureMask);
2361 if (!mHdrPlusRawSrcChannel) {
2362 LOGE("HDR+ Raw Source channel cannot be created");
2363 pthread_mutex_unlock(&mMutex);
2364 return -ENOMEM;
2365 }
2366 }
2367 }
2368
Thierry Strudel3d639192016-09-09 11:52:26 -07002369
2370 if (mAnalysisChannel) {
2371 cam_analysis_info_t analysisInfo;
2372 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2373 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2374 CAM_STREAM_TYPE_ANALYSIS;
2375 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2376 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2377 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2378 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2379 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002380 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2382 &analysisInfo);
2383 if (rc != NO_ERROR) {
2384 LOGE("getAnalysisInfo failed, ret = %d", rc);
2385 pthread_mutex_unlock(&mMutex);
2386 return rc;
2387 }
2388 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002389 mCommon.getMatchingDimension(previewSize,
2390 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 mStreamConfigInfo.num_streams++;
2392 }
2393
2394 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2395 cam_analysis_info_t supportInfo;
2396 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2397 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2398 setPAAFSupport(callbackFeatureMask,
2399 CAM_STREAM_TYPE_CALLBACK,
2400 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002401 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002402 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002403 if (ret != NO_ERROR) {
2404 /* Ignore the error for Mono camera
2405 * because the PAAF bit mask is only set
2406 * for CAM_STREAM_TYPE_ANALYSIS stream type
2407 */
2408 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2409 LOGW("getAnalysisInfo failed, ret = %d", ret);
2410 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 }
2412 mSupportChannel = new QCamera3SupportChannel(
2413 mCameraHandle->camera_handle,
2414 mChannelHandle,
2415 mCameraHandle->ops,
2416 &gCamCapability[mCameraId]->padding_info,
2417 callbackFeatureMask,
2418 CAM_STREAM_TYPE_CALLBACK,
2419 &QCamera3SupportChannel::kDim,
2420 CAM_FORMAT_YUV_420_NV21,
2421 supportInfo.hw_analysis_supported,
2422 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002423 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002424 if (!mSupportChannel) {
2425 LOGE("dummy channel cannot be created");
2426 pthread_mutex_unlock(&mMutex);
2427 return -ENOMEM;
2428 }
2429 }
2430
2431 if (mSupportChannel) {
2432 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2433 QCamera3SupportChannel::kDim;
2434 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2435 CAM_STREAM_TYPE_CALLBACK;
2436 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2437 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2438 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2439 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2440 gCamCapability[mCameraId]->color_arrangement);
2441 mStreamConfigInfo.num_streams++;
2442 }
2443
2444 if (mRawDumpChannel) {
2445 cam_dimension_t rawSize;
2446 rawSize = getMaxRawSize(mCameraId);
2447 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2448 rawSize;
2449 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2450 CAM_STREAM_TYPE_RAW;
2451 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2452 CAM_QCOM_FEATURE_NONE;
2453 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2454 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2455 gCamCapability[mCameraId]->color_arrangement);
2456 mStreamConfigInfo.num_streams++;
2457 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002458
2459 if (mHdrPlusRawSrcChannel) {
2460 cam_dimension_t rawSize;
2461 rawSize = getMaxRawSize(mCameraId);
2462 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2463 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2465 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2466 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2467 gCamCapability[mCameraId]->color_arrangement);
2468 mStreamConfigInfo.num_streams++;
2469 }
2470
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 /* In HFR mode, if video stream is not added, create a dummy channel so that
2472 * ISP can create a batch mode even for preview only case. This channel is
2473 * never 'start'ed (no stream-on), it is only 'initialized' */
2474 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2475 !m_bIsVideo) {
2476 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2477 setPAAFSupport(dummyFeatureMask,
2478 CAM_STREAM_TYPE_VIDEO,
2479 gCamCapability[mCameraId]->color_arrangement);
2480 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2481 mChannelHandle,
2482 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002483 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002484 this,
2485 &mDummyBatchStream,
2486 CAM_STREAM_TYPE_VIDEO,
2487 dummyFeatureMask,
2488 mMetadataChannel);
2489 if (NULL == mDummyBatchChannel) {
2490 LOGE("creation of mDummyBatchChannel failed."
2491 "Preview will use non-hfr sensor mode ");
2492 }
2493 }
2494 if (mDummyBatchChannel) {
2495 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2496 mDummyBatchStream.width;
2497 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2498 mDummyBatchStream.height;
2499 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2500 CAM_STREAM_TYPE_VIDEO;
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2503 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2504 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2505 gCamCapability[mCameraId]->color_arrangement);
2506 mStreamConfigInfo.num_streams++;
2507 }
2508
2509 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2510 mStreamConfigInfo.buffer_info.max_buffers =
2511 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2512
2513 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2514 for (pendingRequestIterator i = mPendingRequestsList.begin();
2515 i != mPendingRequestsList.end();) {
2516 i = erasePendingRequest(i);
2517 }
2518 mPendingFrameDropList.clear();
2519 // Initialize/Reset the pending buffers list
2520 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2521 req.mPendingBufferList.clear();
2522 }
2523 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2524
2525 mPendingReprocessResultList.clear();
2526
2527 mCurJpegMeta.clear();
2528 //Get min frame duration for this streams configuration
2529 deriveMinFrameDuration();
2530
2531 // Update state
2532 mState = CONFIGURED;
2533
2534 pthread_mutex_unlock(&mMutex);
2535
2536 return rc;
2537}
2538
2539/*===========================================================================
2540 * FUNCTION : validateCaptureRequest
2541 *
2542 * DESCRIPTION: validate a capture request from camera service
2543 *
2544 * PARAMETERS :
2545 * @request : request from framework to process
2546 *
2547 * RETURN :
2548 *
2549 *==========================================================================*/
2550int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002551 camera3_capture_request_t *request,
2552 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002553{
2554 ssize_t idx = 0;
2555 const camera3_stream_buffer_t *b;
2556 CameraMetadata meta;
2557
2558 /* Sanity check the request */
2559 if (request == NULL) {
2560 LOGE("NULL capture request");
2561 return BAD_VALUE;
2562 }
2563
2564 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2565 /*settings cannot be null for the first request*/
2566 return BAD_VALUE;
2567 }
2568
2569 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002570 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2571 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002572 LOGE("Request %d: No output buffers provided!",
 2573 frameNumber);
2574 return BAD_VALUE;
2575 }
2576 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2577 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2578 request->num_output_buffers, MAX_NUM_STREAMS);
2579 return BAD_VALUE;
2580 }
2581 if (request->input_buffer != NULL) {
2582 b = request->input_buffer;
2583 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2584 LOGE("Request %d: Buffer %ld: Status not OK!",
2585 frameNumber, (long)idx);
2586 return BAD_VALUE;
2587 }
2588 if (b->release_fence != -1) {
2589 LOGE("Request %d: Buffer %ld: Has a release fence!",
2590 frameNumber, (long)idx);
2591 return BAD_VALUE;
2592 }
2593 if (b->buffer == NULL) {
2594 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2595 frameNumber, (long)idx);
2596 return BAD_VALUE;
2597 }
2598 }
2599
2600 // Validate all buffers
2601 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002602 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002603 QCamera3ProcessingChannel *channel =
2604 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2605 if (channel == NULL) {
2606 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2607 frameNumber, (long)idx);
2608 return BAD_VALUE;
2609 }
2610 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2611 LOGE("Request %d: Buffer %ld: Status not OK!",
2612 frameNumber, (long)idx);
2613 return BAD_VALUE;
2614 }
2615 if (b->release_fence != -1) {
2616 LOGE("Request %d: Buffer %ld: Has a release fence!",
2617 frameNumber, (long)idx);
2618 return BAD_VALUE;
2619 }
2620 if (b->buffer == NULL) {
2621 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2622 frameNumber, (long)idx);
2623 return BAD_VALUE;
2624 }
2625 if (*(b->buffer) == NULL) {
2626 LOGE("Request %d: Buffer %ld: NULL private handle!",
2627 frameNumber, (long)idx);
2628 return BAD_VALUE;
2629 }
2630 idx++;
2631 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002632 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002633 return NO_ERROR;
2634}
2635
2636/*===========================================================================
2637 * FUNCTION : deriveMinFrameDuration
2638 *
2639 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2640 * on currently configured streams.
2641 *
2642 * PARAMETERS : NONE
2643 *
2644 * RETURN : NONE
2645 *
2646 *==========================================================================*/
2647void QCamera3HardwareInterface::deriveMinFrameDuration()
2648{
2649 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2650
2651 maxJpegDim = 0;
2652 maxProcessedDim = 0;
2653 maxRawDim = 0;
2654
2655 // Figure out maximum jpeg, processed, and raw dimensions
2656 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2657 it != mStreamInfo.end(); it++) {
2658
2659 // Input stream doesn't have valid stream_type
2660 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2661 continue;
2662
2663 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2664 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2665 if (dimension > maxJpegDim)
2666 maxJpegDim = dimension;
2667 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2668 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2669 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2670 if (dimension > maxRawDim)
2671 maxRawDim = dimension;
2672 } else {
2673 if (dimension > maxProcessedDim)
2674 maxProcessedDim = dimension;
2675 }
2676 }
2677
2678 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2679 MAX_SIZES_CNT);
2680
2681 //Assume all jpeg dimensions are in processed dimensions.
2682 if (maxJpegDim > maxProcessedDim)
2683 maxProcessedDim = maxJpegDim;
2684 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2685 if (maxProcessedDim > maxRawDim) {
2686 maxRawDim = INT32_MAX;
2687
2688 for (size_t i = 0; i < count; i++) {
2689 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2690 gCamCapability[mCameraId]->raw_dim[i].height;
2691 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2692 maxRawDim = dimension;
2693 }
2694 }
2695
2696 //Find minimum durations for processed, jpeg, and raw
2697 for (size_t i = 0; i < count; i++) {
2698 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2699 gCamCapability[mCameraId]->raw_dim[i].height) {
2700 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2701 break;
2702 }
2703 }
2704 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2705 for (size_t i = 0; i < count; i++) {
2706 if (maxProcessedDim ==
2707 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2708 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2709 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2710 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2711 break;
2712 }
2713 }
2714}
2715
2716/*===========================================================================
2717 * FUNCTION : getMinFrameDuration
2718 *
2719 * DESCRIPTION: get the minimum frame duration for a request, based on the per-stream
 2720 * minimum durations derived for the current configuration and the streams in the request.
2721 *
2722 * PARAMETERS : @request: request sent by the framework
2723 *
2724 * RETURN : min frame duration for a particular request
2725 *
2726 *==========================================================================*/
2727int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2728{
2729 bool hasJpegStream = false;
2730 bool hasRawStream = false;
2731 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2732 const camera3_stream_t *stream = request->output_buffers[i].stream;
2733 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2734 hasJpegStream = true;
2735 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2736 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2737 stream->format == HAL_PIXEL_FORMAT_RAW16)
2738 hasRawStream = true;
2739 }
2740
2741 if (!hasJpegStream)
2742 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2743 else
2744 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2745}
2746
2747/*===========================================================================
2748 * FUNCTION : handleBuffersDuringFlushLock
2749 *
2750 * DESCRIPTION: Account for buffers returned from back-end during flush
2751 * This function is executed while mMutex is held by the caller.
2752 *
2753 * PARAMETERS :
2754 * @buffer: image buffer for the callback
2755 *
2756 * RETURN :
2757 *==========================================================================*/
2758void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2759{
2760 bool buffer_found = false;
2761 for (List<PendingBuffersInRequest>::iterator req =
2762 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2763 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2764 for (List<PendingBufferInfo>::iterator i =
2765 req->mPendingBufferList.begin();
2766 i != req->mPendingBufferList.end(); i++) {
2767 if (i->buffer == buffer->buffer) {
2768 mPendingBuffersMap.numPendingBufsAtFlush--;
2769 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2770 buffer->buffer, req->frame_number,
2771 mPendingBuffersMap.numPendingBufsAtFlush);
2772 buffer_found = true;
2773 break;
2774 }
2775 }
2776 if (buffer_found) {
2777 break;
2778 }
2779 }
2780 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2781 //signal the flush()
2782 LOGD("All buffers returned to HAL. Continue flush");
2783 pthread_cond_signal(&mBuffersCond);
2784 }
2785}
2786
2787
2788/*===========================================================================
2789 * FUNCTION : handlePendingReprocResults
2790 *
2791 * DESCRIPTION: check and notify on any pending reprocess results
2792 *
2793 * PARAMETERS :
2794 * @frame_number : Pending request frame number
2795 *
2796 * RETURN : int32_t type of status
2797 * NO_ERROR -- success
2798 * non-zero failure code
2799 *==========================================================================*/
2800int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
2801{
2802 for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
2803 j != mPendingReprocessResultList.end(); j++) {
2804 if (j->frame_number == frame_number) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002805 orchestrateNotify(&j->notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07002806
2807 LOGD("Delayed reprocess notify %d",
2808 frame_number);
2809
2810 for (pendingRequestIterator k = mPendingRequestsList.begin();
2811 k != mPendingRequestsList.end(); k++) {
2812
2813 if (k->frame_number == j->frame_number) {
2814 LOGD("Found reprocess frame number %d in pending reprocess List "
2815 "Take it out!!",
2816 k->frame_number);
2817
2818 camera3_capture_result result;
2819 memset(&result, 0, sizeof(camera3_capture_result));
2820 result.frame_number = frame_number;
2821 result.num_output_buffers = 1;
2822 result.output_buffers = &j->buffer;
2823 result.input_buffer = k->input_buffer;
2824 result.result = k->settings;
2825 result.partial_result = PARTIAL_RESULT_COUNT;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002826 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07002827
2828 erasePendingRequest(k);
2829 break;
2830 }
2831 }
2832 mPendingReprocessResultList.erase(j);
2833 break;
2834 }
2835 }
2836 return NO_ERROR;
2837}
2838
2839/*===========================================================================
2840 * FUNCTION : handleBatchMetadata
2841 *
2842 * DESCRIPTION: Handles metadata buffer callback in batch mode
2843 *
2844 * PARAMETERS : @metadata_buf: metadata buffer
2845 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2846 * the meta buf in this method
2847 *
2848 * RETURN :
2849 *
2850 *==========================================================================*/
2851void QCamera3HardwareInterface::handleBatchMetadata(
2852 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
2853{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002854 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07002855
2856 if (NULL == metadata_buf) {
2857 LOGE("metadata_buf is NULL");
2858 return;
2859 }
2860 /* In batch mode, the metadata will contain the frame number and timestamp of
2861 * the last frame in the batch. Eg: a batch containing buffers from request
2862 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
2863 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
2864 * multiple process_capture_results */
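    /* Worked example: if this metadata buffer closes a batch carrying requests 5..8, then
     * last_frame_number is 8, first_frame_number (looked up from mPendingBatchMap) is 5, so
     * frameNumDiff is 4 and this single buffer fans out into four interpolated capture results. */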
2865 metadata_buffer_t *metadata =
2866 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
2867 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
2868 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
2869 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
2870 uint32_t frame_number = 0, urgent_frame_number = 0;
2871 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
2872 bool invalid_metadata = false;
2873 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
2874 size_t loopCount = 1;
2875
2876 int32_t *p_frame_number_valid =
2877 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
2878 uint32_t *p_frame_number =
2879 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
2880 int64_t *p_capture_time =
2881 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
2882 int32_t *p_urgent_frame_number_valid =
2883 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
2884 uint32_t *p_urgent_frame_number =
2885 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
2886
2887 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
2888 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
2889 (NULL == p_urgent_frame_number)) {
2890 LOGE("Invalid metadata");
2891 invalid_metadata = true;
2892 } else {
2893 frame_number_valid = *p_frame_number_valid;
2894 last_frame_number = *p_frame_number;
2895 last_frame_capture_time = *p_capture_time;
2896 urgent_frame_number_valid = *p_urgent_frame_number_valid;
2897 last_urgent_frame_number = *p_urgent_frame_number;
2898 }
2899
2900 /* In batch mode, when no video buffers are requested, set_parms are sent
2901 * for every capture_request. The difference between consecutive urgent
2902 * frame numbers and frame numbers should be used to interpolate the
2903 * corresponding frame numbers and time stamps */
2904 pthread_mutex_lock(&mMutex);
2905 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07002906 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
2907 if(idx < 0) {
2908 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
2909 last_urgent_frame_number);
2910 mState = ERROR;
2911 pthread_mutex_unlock(&mMutex);
2912 return;
2913 }
2914 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07002915 urgentFrameNumDiff = last_urgent_frame_number + 1 -
2916 first_urgent_frame_number;
2917
2918 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
2919 urgent_frame_number_valid,
2920 first_urgent_frame_number, last_urgent_frame_number);
2921 }
2922
2923 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07002924 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
2925 if(idx < 0) {
2926 LOGE("Invalid frame number received: %d. Irrecoverable error",
2927 last_frame_number);
2928 mState = ERROR;
2929 pthread_mutex_unlock(&mMutex);
2930 return;
2931 }
2932 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 frameNumDiff = last_frame_number + 1 -
2934 first_frame_number;
2935 mPendingBatchMap.removeItem(last_frame_number);
2936
2937 LOGD("frm: valid: %d frm_num: %d - %d",
2938 frame_number_valid,
2939 first_frame_number, last_frame_number);
2940
2941 }
2942 pthread_mutex_unlock(&mMutex);
2943
2944 if (urgent_frame_number_valid || frame_number_valid) {
2945 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
2946 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
2947 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
2948 urgentFrameNumDiff, last_urgent_frame_number);
2949 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
2950 LOGE("frameNumDiff: %d frameNum: %d",
2951 frameNumDiff, last_frame_number);
2952 }
2953
2954 for (size_t i = 0; i < loopCount; i++) {
2955 /* handleMetadataWithLock is called even for invalid_metadata for
2956 * pipeline depth calculation */
2957 if (!invalid_metadata) {
2958 /* Infer frame number. Batch metadata contains frame number of the
2959 * last frame */
2960 if (urgent_frame_number_valid) {
2961 if (i < urgentFrameNumDiff) {
2962 urgent_frame_number =
2963 first_urgent_frame_number + i;
2964 LOGD("inferred urgent frame_number: %d",
2965 urgent_frame_number);
2966 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2967 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
2968 } else {
2969 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
2970 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2971 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
2972 }
2973 }
2974
2975 /* Infer frame number. Batch metadata contains frame number of the
2976 * last frame */
2977 if (frame_number_valid) {
2978 if (i < frameNumDiff) {
2979 frame_number = first_frame_number + i;
2980 LOGD("inferred frame_number: %d", frame_number);
2981 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2982 CAM_INTF_META_FRAME_NUMBER, frame_number);
2983 } else {
2984 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
2985 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2986 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
2987 }
2988 }
2989
2990 if (last_frame_capture_time) {
2991 //Infer timestamp
2992 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002993 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07002994 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002995 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07002996 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
2997 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
2998 LOGD("batch capture_time: %lld, capture_time: %lld",
2999 last_frame_capture_time, capture_time);
3000 }
3001 }
3002 pthread_mutex_lock(&mMutex);
3003 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003004 false /* free_and_bufdone_meta_buf */,
3005 (i == 0) /* first metadata in the batch metadata */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003006 pthread_mutex_unlock(&mMutex);
3007 }
3008
3009 /* BufDone metadata buffer */
3010 if (free_and_bufdone_meta_buf) {
3011 mMetadataChannel->bufDone(metadata_buf);
3012 free(metadata_buf);
3013 }
3014}
3015
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003016void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3017 camera3_error_msg_code_t errorCode)
3018{
3019 camera3_notify_msg_t notify_msg;
3020 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3021 notify_msg.type = CAMERA3_MSG_ERROR;
3022 notify_msg.message.error.error_code = errorCode;
3023 notify_msg.message.error.error_stream = NULL;
3024 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003025 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003026
3027 return;
3028}
Thierry Strudel3d639192016-09-09 11:52:26 -07003029/*===========================================================================
3030 * FUNCTION : handleMetadataWithLock
3031 *
3032 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3033 *
3034 * PARAMETERS : @metadata_buf: metadata buffer
3035 * @free_and_bufdone_meta_buf: Whether to do buf done on the meta buffer
3036 * and free it in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003037 * @firstMetadataInBatch: Boolean to indicate whether this is the
3038 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07003039 *
3040 * RETURN :
3041 *
3042 *==========================================================================*/
3043void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003044 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
3045 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07003046{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003047 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003048 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3049 //during flush do not send metadata from this thread
3050 LOGD("not sending metadata during flush or when mState is error");
3051 if (free_and_bufdone_meta_buf) {
3052 mMetadataChannel->bufDone(metadata_buf);
3053 free(metadata_buf);
3054 }
3055 return;
3056 }
3057
3058 //not in flush
3059 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3060 int32_t frame_number_valid, urgent_frame_number_valid;
3061 uint32_t frame_number, urgent_frame_number;
3062 int64_t capture_time;
3063 nsecs_t currentSysTime;
3064
3065 int32_t *p_frame_number_valid =
3066 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3067 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3068 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3069 int32_t *p_urgent_frame_number_valid =
3070 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3071 uint32_t *p_urgent_frame_number =
3072 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3073 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3074 metadata) {
3075 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3076 *p_frame_number_valid, *p_frame_number);
3077 }
3078
3079 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3080 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3081 LOGE("Invalid metadata");
3082 if (free_and_bufdone_meta_buf) {
3083 mMetadataChannel->bufDone(metadata_buf);
3084 free(metadata_buf);
3085 }
3086 goto done_metadata;
3087 }
3088 frame_number_valid = *p_frame_number_valid;
3089 frame_number = *p_frame_number;
3090 capture_time = *p_capture_time;
3091 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3092 urgent_frame_number = *p_urgent_frame_number;
3093 currentSysTime = systemTime(CLOCK_MONOTONIC);
3094
3095 // Detect if buffers from any requests are overdue
3096 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3097 if ( (currentSysTime - req.timestamp) >
3098 s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
3099 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003100 assert(missed.stream->priv);
3101 if (missed.stream->priv) {
3102 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3103 assert(ch->mStreams[0]);
3104 if (ch->mStreams[0]) {
3105 LOGE("Cancel missing frame = %d, buffer = %p,"
3106 "stream type = %d, stream format = %d",
3107 req.frame_number, missed.buffer,
3108 ch->mStreams[0]->getMyType(), missed.stream->format);
3109 ch->timeoutFrame(req.frame_number);
3110 }
3111 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003112 }
3113 }
3114 }
3115 //Partial result on process_capture_result for timestamp
3116 if (urgent_frame_number_valid) {
3117 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3118 urgent_frame_number, capture_time);
3119
3120 //Received an urgent frame number, handle it
3121 //using partial results
3122 for (pendingRequestIterator i =
3123 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3124 LOGD("Iterator Frame = %d urgent frame = %d",
3125 i->frame_number, urgent_frame_number);
3126
3127 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3128 (i->partial_result_cnt == 0)) {
3129 LOGE("Error: HAL missed urgent metadata for frame number %d",
3130 i->frame_number);
3131 }
3132
3133 if (i->frame_number == urgent_frame_number &&
3134 i->bUrgentReceived == 0) {
3135
3136 camera3_capture_result_t result;
3137 memset(&result, 0, sizeof(camera3_capture_result_t));
3138
3139 i->partial_result_cnt++;
3140 i->bUrgentReceived = 1;
3141 // Extract 3A metadata
3142 result.result =
3143 translateCbUrgentMetadataToResultMetadata(metadata);
3144 // Populate metadata result
3145 result.frame_number = urgent_frame_number;
3146 result.num_output_buffers = 0;
3147 result.output_buffers = NULL;
3148 result.partial_result = i->partial_result_cnt;
3149
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003150 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003151 LOGD("urgent frame_number = %u, capture_time = %lld",
3152 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003153 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3154 // Instant AEC settled for this frame.
3155 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3156 mInstantAECSettledFrameNumber = urgent_frame_number;
3157 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003158 free_camera_metadata((camera_metadata_t *)result.result);
3159 break;
3160 }
3161 }
3162 }
3163
3164 if (!frame_number_valid) {
3165 LOGD("Not a valid normal frame number, used as SOF only");
3166 if (free_and_bufdone_meta_buf) {
3167 mMetadataChannel->bufDone(metadata_buf);
3168 free(metadata_buf);
3169 }
3170 goto done_metadata;
3171 }
3172 LOGH("valid frame_number = %u, capture_time = %lld",
3173 frame_number, capture_time);
3174
3175 for (pendingRequestIterator i = mPendingRequestsList.begin();
3176 i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
3177 // Flush out all entries with less or equal frame numbers.
3178
3179 camera3_capture_result_t result;
3180 memset(&result, 0, sizeof(camera3_capture_result_t));
3181
3182 LOGD("frame_number in the list is %u", i->frame_number);
3183 i->partial_result_cnt++;
3184 result.partial_result = i->partial_result_cnt;
3185
3186 // Check whether any stream buffer corresponding to this is dropped or not
3187 // If dropped, then send the ERROR_BUFFER for the corresponding stream
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003188 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3189 if (p_cam_frame_drop ||
3190 (mInstantAEC || i->frame_number < mInstantAECSettledFrameNumber)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003191 /* Clear notify_msg structure */
3192 camera3_notify_msg_t notify_msg;
3193 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3194 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3195 j != i->buffers.end(); j++) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003196 bool dropFrame = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003197 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
3198 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003199 if (p_cam_frame_drop) {
3200 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003201 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003202 // Got the stream ID for drop frame.
3203 dropFrame = true;
3204 break;
3205 }
3206 }
3207 } else {
3208 // This is instant AEC case.
3209 // For instant AEC, drop the stream until AEC is settled.
3210 dropFrame = true;
3211 }
3212 if (dropFrame) {
3213 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3214 if (p_cam_frame_drop) {
3215 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003216 LOGE("Start of reporting error frame#=%u, streamID=%u",
3217 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003218 } else {
3219 // For instant AEC, inform frame drop and frame number
3220 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3221 "AEC settled frame number = %u",
3222 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3223 }
3224 notify_msg.type = CAMERA3_MSG_ERROR;
3225 notify_msg.message.error.frame_number = i->frame_number;
3226 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
3227 notify_msg.message.error.error_stream = j->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003228 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003229 if (p_cam_frame_drop) {
3230 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003231 LOGE("End of reporting error frame#=%u, streamID=%u",
3232 i->frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003233 } else {
3234 // For instant AEC, inform frame drop and frame number
3235 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3236 "AEC settled frame number = %u",
3237 i->frame_number, streamID, mInstantAECSettledFrameNumber);
3238 }
3239 PendingFrameDropInfo PendingFrameDrop;
3240 PendingFrameDrop.frame_number=i->frame_number;
3241 PendingFrameDrop.stream_ID = streamID;
3242 // Add the Frame drop info to mPendingFrameDropList
3243 mPendingFrameDropList.push_back(PendingFrameDrop);
3244 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003245 }
3246 }
3247
3248 // Send empty metadata with already filled buffers for dropped metadata
3249 // and send valid metadata with already filled buffers for current metadata
3250 /* we could hit this case when we either
3251 * 1. have a pending reprocess request or
3252 * 2. miss a metadata buffer callback */
3253 if (i->frame_number < frame_number) {
3254 if (i->input_buffer) {
3255 /* this will be handled in handleInputBufferWithLock */
3256 i++;
3257 continue;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003258 } else {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003259
3260 mPendingLiveRequest--;
3261
3262 CameraMetadata dummyMetadata;
3263 dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
3264 result.result = dummyMetadata.release();
3265
3266 notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003267 }
3268 } else {
3269 mPendingLiveRequest--;
3270 /* Clear notify_msg structure */
3271 camera3_notify_msg_t notify_msg;
3272 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3273
3274 // Send shutter notify to frameworks
3275 notify_msg.type = CAMERA3_MSG_SHUTTER;
3276 notify_msg.message.shutter.frame_number = i->frame_number;
3277 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003278 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003279
3280 i->timestamp = capture_time;
3281
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003282 /* Set the timestamp in display metadata so that clients aware of
3283 private_handle, such as VT, can use these unmodified timestamps.
3284 The camera framework is unaware of this timestamp and cannot change it */
3285 updateTimeStampInPendingBuffers(i->frame_number, i->timestamp);
3286
Thierry Strudel3d639192016-09-09 11:52:26 -07003287 // Find channel requiring metadata, meaning internal offline postprocess
3288 // is needed.
3289 //TODO: for now, we don't support two streams requiring metadata at the same time.
3290 // (because we are not making copies, and metadata buffer is not reference counted.
3291 bool internalPproc = false;
3292 for (pendingBufferIterator iter = i->buffers.begin();
3293 iter != i->buffers.end(); iter++) {
3294 if (iter->need_metadata) {
3295 internalPproc = true;
3296 QCamera3ProcessingChannel *channel =
3297 (QCamera3ProcessingChannel *)iter->stream->priv;
3298 channel->queueReprocMetadata(metadata_buf);
3299 break;
3300 }
3301 }
3302
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003303 for (auto itr = i->internalRequestList.begin();
3304 itr != i->internalRequestList.end(); itr++) {
3305 if (itr->need_metadata) {
3306 internalPproc = true;
3307 QCamera3ProcessingChannel *channel =
3308 (QCamera3ProcessingChannel *)itr->stream->priv;
3309 channel->queueReprocMetadata(metadata_buf);
3310 break;
3311 }
3312 }
3313
3314
Thierry Strudel3d639192016-09-09 11:52:26 -07003315 result.result = translateFromHalMetadata(metadata,
3316 i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
Samuel Ha68ba5172016-12-15 18:41:12 -08003317 i->capture_intent,
3318 /* DevCamDebug metadata translateFromHalMetadata function call*/
3319 i->DevCamDebug_meta_enable,
3320 /* DevCamDebug metadata end */
3321 internalPproc, i->fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003322 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003323
3324 saveExifParams(metadata);
3325
3326 if (i->blob_request) {
3327 {
3328 //Dump tuning metadata if enabled and available
3329 char prop[PROPERTY_VALUE_MAX];
3330 memset(prop, 0, sizeof(prop));
3331 property_get("persist.camera.dumpmetadata", prop, "0");
3332 int32_t enabled = atoi(prop);
3333 if (enabled && metadata->is_tuning_params_valid) {
3334 dumpMetadataToFile(metadata->tuning_params,
3335 mMetaFrameCount,
3336 enabled,
3337 "Snapshot",
3338 frame_number);
3339 }
3340 }
3341 }
3342
3343 if (!internalPproc) {
3344 LOGD("couldn't find need_metadata for this metadata");
3345 // Return metadata buffer
3346 if (free_and_bufdone_meta_buf) {
3347 mMetadataChannel->bufDone(metadata_buf);
3348 free(metadata_buf);
3349 }
3350 }
3351 }
3352 if (!result.result) {
3353 LOGE("metadata is NULL");
3354 }
3355 result.frame_number = i->frame_number;
3356 result.input_buffer = i->input_buffer;
3357 result.num_output_buffers = 0;
3358 result.output_buffers = NULL;
3359 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3360 j != i->buffers.end(); j++) {
3361 if (j->buffer) {
3362 result.num_output_buffers++;
3363 }
3364 }
3365
3366 updateFpsInPreviewBuffer(metadata, i->frame_number);
3367
3368 if (result.num_output_buffers > 0) {
3369 camera3_stream_buffer_t *result_buffers =
3370 new camera3_stream_buffer_t[result.num_output_buffers];
3371 if (result_buffers != NULL) {
3372 size_t result_buffers_idx = 0;
3373 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3374 j != i->buffers.end(); j++) {
3375 if (j->buffer) {
3376 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3377 m != mPendingFrameDropList.end(); m++) {
3378 QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
3379 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3380 if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
3381 j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3382 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
3383 frame_number, streamID);
3384 m = mPendingFrameDropList.erase(m);
3385 break;
3386 }
3387 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003388 j->buffer->status |= mPendingBuffersMap.getBufErrStatus(j->buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003389 mPendingBuffersMap.removeBuf(j->buffer->buffer);
3390 result_buffers[result_buffers_idx++] = *(j->buffer);
3391 free(j->buffer);
3392 j->buffer = NULL;
3393 }
3394 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003395
Thierry Strudel3d639192016-09-09 11:52:26 -07003396 result.output_buffers = result_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003397 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 LOGD("meta frame_number = %u, capture_time = %lld",
3399 result.frame_number, i->timestamp);
3400 free_camera_metadata((camera_metadata_t *)result.result);
3401 delete[] result_buffers;
3402 } else {
3403 LOGE("Fatal error: out of memory");
3404 }
3405 } else {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003406 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003407 LOGD("meta frame_number = %u, capture_time = %lld",
3408 result.frame_number, i->timestamp);
3409 free_camera_metadata((camera_metadata_t *)result.result);
3410 }
3411
3412 i = erasePendingRequest(i);
3413
3414 if (!mPendingReprocessResultList.empty()) {
3415 handlePendingReprocResults(frame_number + 1);
3416 }
3417 }
3418
3419done_metadata:
3420 for (pendingRequestIterator i = mPendingRequestsList.begin();
3421 i != mPendingRequestsList.end() ;i++) {
3422 i->pipeline_depth++;
3423 }
3424 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3425 unblockRequestIfNecessary();
3426}
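/* Rough sketch of the per-frame result sequence produced by handleMetadataWithLock
 * above, assuming PARTIAL_RESULT_COUNT == 2 and no frame drops (illustration only):
 *
 *     // urgent (3A) metadata arrives first as a metadata-only partial result
 *     result.partial_result = 1;   // from translateCbUrgentMetadataToResultMetadata()
 *     orchestrateResult(&result);
 *
 *     // later, the full metadata for the same frame: shutter notify, then the
 *     // final result carrying translateFromHalMetadata() output plus any buffers
 *     // cached so far for that frame
 *     notify_msg.type = CAMERA3_MSG_SHUTTER;
 *     orchestrateNotify(&notify_msg);
 *     result.partial_result = 2;
 *     orchestrateResult(&result);
 */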
3427
3428/*===========================================================================
3429 * FUNCTION : hdrPlusPerfLock
3430 *
3431 * DESCRIPTION: perf lock for HDR+ using custom intent
3432 *
3433 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3434 *
3435 * RETURN : None
3436 *
3437 *==========================================================================*/
3438void QCamera3HardwareInterface::hdrPlusPerfLock(
3439 mm_camera_super_buf_t *metadata_buf)
3440{
3441 if (NULL == metadata_buf) {
3442 LOGE("metadata_buf is NULL");
3443 return;
3444 }
3445 metadata_buffer_t *metadata =
3446 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3447 int32_t *p_frame_number_valid =
3448 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3449 uint32_t *p_frame_number =
3450 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3451
3452 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3453 LOGE("%s: Invalid metadata", __func__);
3454 return;
3455 }
3456
3457 //acquire perf lock for 5 sec after the last HDR frame is captured
3458 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3459 if ((p_frame_number != NULL) &&
3460 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003461 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003462 }
3463 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003464}
3465
3466/*===========================================================================
3467 * FUNCTION : handleInputBufferWithLock
3468 *
3469 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3470 *
3471 * PARAMETERS : @frame_number: frame number of the input buffer
3472 *
3473 * RETURN :
3474 *
3475 *==========================================================================*/
3476void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3477{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003478 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003479 pendingRequestIterator i = mPendingRequestsList.begin();
3480 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3481 i++;
3482 }
3483 if (i != mPendingRequestsList.end() && i->input_buffer) {
3484 //found the right request
3485 if (!i->shutter_notified) {
3486 CameraMetadata settings;
3487 camera3_notify_msg_t notify_msg;
3488 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3489 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3490 if(i->settings) {
3491 settings = i->settings;
3492 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3493 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3494 } else {
3495 LOGE("No timestamp in input settings! Using current one.");
3496 }
3497 } else {
3498 LOGE("Input settings missing!");
3499 }
3500
3501 notify_msg.type = CAMERA3_MSG_SHUTTER;
3502 notify_msg.message.shutter.frame_number = frame_number;
3503 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003504 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003505 i->shutter_notified = true;
3506 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3507 i->frame_number, notify_msg.message.shutter.timestamp);
3508 }
3509
3510 if (i->input_buffer->release_fence != -1) {
3511 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3512 close(i->input_buffer->release_fence);
3513 if (rc != OK) {
3514 LOGE("input buffer sync wait failed %d", rc);
3515 }
3516 }
3517
3518 camera3_capture_result result;
3519 memset(&result, 0, sizeof(camera3_capture_result));
3520 result.frame_number = frame_number;
3521 result.result = i->settings;
3522 result.input_buffer = i->input_buffer;
3523 result.partial_result = PARTIAL_RESULT_COUNT;
3524
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003525 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003526 LOGD("Input request metadata and input buffer frame_number = %u",
3527 i->frame_number);
3528 i = erasePendingRequest(i);
3529 } else {
3530 LOGE("Could not find input request for frame number %d", frame_number);
3531 }
3532}
3533
3534/*===========================================================================
3535 * FUNCTION : handleBufferWithLock
3536 *
3537 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @buffer: image buffer for the callback
3540 * @frame_number: frame number of the image buffer
3541 *
3542 * RETURN :
3543 *
3544 *==========================================================================*/
3545void QCamera3HardwareInterface::handleBufferWithLock(
3546 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3547{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003548 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003549
3550 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3551 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3552 }
3553
Thierry Strudel3d639192016-09-09 11:52:26 -07003554 /* Nothing to be done during error state */
3555 if ((ERROR == mState) || (DEINIT == mState)) {
3556 return;
3557 }
3558 if (mFlushPerf) {
3559 handleBuffersDuringFlushLock(buffer);
3560 return;
3561 }
3562 //not in flush
3563 // If the frame number doesn't exist in the pending request list,
3564 // directly send the buffer to the frameworks, and update pending buffers map
3565 // Otherwise, book-keep the buffer.
3566 pendingRequestIterator i = mPendingRequestsList.begin();
3567 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3568 i++;
3569 }
3570 if (i == mPendingRequestsList.end()) {
3571 // Verify all pending requests frame_numbers are greater
3572 for (pendingRequestIterator j = mPendingRequestsList.begin();
3573 j != mPendingRequestsList.end(); j++) {
3574 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3575 LOGW("Error: pending live frame number %d is smaller than %d",
3576 j->frame_number, frame_number);
3577 }
3578 }
3579 camera3_capture_result_t result;
3580 memset(&result, 0, sizeof(camera3_capture_result_t));
3581 result.result = NULL;
3582 result.frame_number = frame_number;
3583 result.num_output_buffers = 1;
3584 result.partial_result = 0;
3585 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3586 m != mPendingFrameDropList.end(); m++) {
3587 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3588 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3589 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3590 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3591 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3592 frame_number, streamID);
3593 m = mPendingFrameDropList.erase(m);
3594 break;
3595 }
3596 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003597 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003598 result.output_buffers = buffer;
3599 LOGH("result frame_number = %d, buffer = %p",
3600 frame_number, buffer->buffer);
3601
3602 mPendingBuffersMap.removeBuf(buffer->buffer);
3603
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003604 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003605 } else {
3606 if (i->input_buffer) {
3607 CameraMetadata settings;
3608 camera3_notify_msg_t notify_msg;
3609 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3610 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3611 if(i->settings) {
3612 settings = i->settings;
3613 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3614 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3615 } else {
3616 LOGW("No timestamp in input settings! Using current one.");
3617 }
3618 } else {
3619 LOGE("Input settings missing!");
3620 }
3621
3622 notify_msg.type = CAMERA3_MSG_SHUTTER;
3623 notify_msg.message.shutter.frame_number = frame_number;
3624 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
3625
3626 if (i->input_buffer->release_fence != -1) {
3627 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3628 close(i->input_buffer->release_fence);
3629 if (rc != OK) {
3630 LOGE("input buffer sync wait failed %d", rc);
3631 }
3632 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003633 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003634 mPendingBuffersMap.removeBuf(buffer->buffer);
3635
Thierry Strudel04e026f2016-10-10 11:27:36 -07003636 camera3_capture_result result;
3637 memset(&result, 0, sizeof(camera3_capture_result));
3638 result.frame_number = frame_number;
3639 result.result = i->settings;
3640 result.input_buffer = i->input_buffer;
3641 result.num_output_buffers = 1;
3642 result.output_buffers = buffer;
3643 result.partial_result = PARTIAL_RESULT_COUNT;
Thierry Strudel3d639192016-09-09 11:52:26 -07003644
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003645 orchestrateNotify(&notify_msg);
3646 orchestrateResult(&result);
Thierry Strudel04e026f2016-10-10 11:27:36 -07003647 LOGD("Notify reprocess now %d!", frame_number);
3648 i = erasePendingRequest(i);
Thierry Strudel3d639192016-09-09 11:52:26 -07003649 } else {
3650 for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
3651 j != i->buffers.end(); j++) {
3652 if (j->stream == buffer->stream) {
3653 if (j->buffer != NULL) {
3654 LOGE("Error: buffer is already set");
3655 } else {
3656 j->buffer = (camera3_stream_buffer_t *)malloc(
3657 sizeof(camera3_stream_buffer_t));
3658 *(j->buffer) = *buffer;
3659 LOGH("cache buffer %p at result frame_number %u",
3660 buffer->buffer, frame_number);
3661 }
3662 }
3663 }
3664 }
3665 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003666
3667 if (mPreviewStarted == false) {
3668 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3669 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
3670 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
3671 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
3672 mPreviewStarted = true;
3673
3674 // Set power hint for preview
3675 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
3676 }
3677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003678}
3679
3680/*===========================================================================
3681 * FUNCTION : unblockRequestIfNecessary
3682 *
3683 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
3684 * that mMutex is held when this function is called.
3685 *
3686 * PARAMETERS :
3687 *
3688 * RETURN :
3689 *
3690 *==========================================================================*/
3691void QCamera3HardwareInterface::unblockRequestIfNecessary()
3692{
3693 // Unblock process_capture_request
3694 pthread_cond_signal(&mRequestCond);
3695}
3696
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003697/*===========================================================================
3698 * FUNCTION : isHdrSnapshotRequest
3699 *
3700 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3701 *
3702 * PARAMETERS : camera3 request structure
3703 *
3704 * RETURN : boolean decision variable
3705 *
3706 *==========================================================================*/
3707bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3708{
3709 if (request == NULL) {
3710 LOGE("Invalid request handle");
3711 assert(0);
3712 return false;
3713 }
3714
3715 if (!mForceHdrSnapshot) {
3716 CameraMetadata frame_settings;
3717 frame_settings = request->settings;
3718
3719 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3720 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3721 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3722 return false;
3723 }
3724 } else {
3725 return false;
3726 }
3727
3728 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3729 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3730 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3731 return false;
3732 }
3733 } else {
3734 return false;
3735 }
3736 }
3737
3738 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3739 if (request->output_buffers[i].stream->format
3740 == HAL_PIXEL_FORMAT_BLOB) {
3741 return true;
3742 }
3743 }
3744
3745 return false;
3746}
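/* Minimal sketch of framework settings that would make isHdrSnapshotRequest()
 * return true for a request containing at least one HAL_PIXEL_FORMAT_BLOB output
 * buffer (illustration only; when mForceHdrSnapshot is set, only the BLOB output
 * check applies):
 *
 *     CameraMetadata settings;
 *     uint8_t mode      = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
 *     request->settings = settings.release();
 */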
3747/*===========================================================================
3748 * FUNCTION : orchestrateRequest
3749 *
3750 * DESCRIPTION: Orchestrates a capture request from camera service
3751 *
3752 * PARAMETERS :
3753 * @request : request from framework to process
3754 *
3755 * RETURN : Error status codes
3756 *
3757 *==========================================================================*/
3758int32_t QCamera3HardwareInterface::orchestrateRequest(
3759 camera3_capture_request_t *request)
3760{
3761
3762 uint32_t originalFrameNumber = request->frame_number;
3763 uint32_t originalOutputCount = request->num_output_buffers;
3764 const camera_metadata_t *original_settings = request->settings;
3765 List<InternalRequest> internallyRequestedStreams;
3766 List<InternalRequest> emptyInternalList;
3767
3768 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
3769 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
3770 uint32_t internalFrameNumber;
3771 CameraMetadata modified_meta;
3772
3773
3774 /* Add Blob channel to list of internally requested streams */
3775 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3776 if (request->output_buffers[i].stream->format
3777 == HAL_PIXEL_FORMAT_BLOB) {
3778 InternalRequest streamRequested;
3779 streamRequested.meteringOnly = 1;
3780 streamRequested.need_metadata = 0;
3781 streamRequested.stream = request->output_buffers[i].stream;
3782 internallyRequestedStreams.push_back(streamRequested);
3783 }
3784 }
3785 request->num_output_buffers = 0;
3786 auto itr = internallyRequestedStreams.begin();
3787
3788 /* Modify setting to set compensation */
3789 modified_meta = request->settings;
3790 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
3791 uint8_t aeLock = 1;
3792 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3793 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3794 camera_metadata_t *modified_settings = modified_meta.release();
3795 request->settings = modified_settings;
3796
3797 /* Capture Settling & -2x frame */
3798 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3799 request->frame_number = internalFrameNumber;
3800 processCaptureRequest(request, internallyRequestedStreams);
3801
3802 request->num_output_buffers = originalOutputCount;
3803 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
3804 request->frame_number = internalFrameNumber;
3805 processCaptureRequest(request, emptyInternalList);
3806 request->num_output_buffers = 0;
3807
3808 modified_meta = modified_settings;
3809 expCompensation = 0;
3810 aeLock = 1;
3811 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3812 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3813 modified_settings = modified_meta.release();
3814 request->settings = modified_settings;
3815
3816 /* Capture Settling & 0X frame */
3817
3818 itr = internallyRequestedStreams.begin();
3819 if (itr == internallyRequestedStreams.end()) {
3820 LOGE("Error Internally Requested Stream list is empty");
3821 assert(0);
3822 } else {
3823 itr->need_metadata = 0;
3824 itr->meteringOnly = 1;
3825 }
3826
3827 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3828 request->frame_number = internalFrameNumber;
3829 processCaptureRequest(request, internallyRequestedStreams);
3830
3831 itr = internallyRequestedStreams.begin();
3832 if (itr == internallyRequestedStreams.end()) {
3833 ALOGE("Error Internally Requested Stream list is empty");
3834 assert(0);
3835 } else {
3836 itr->need_metadata = 1;
3837 itr->meteringOnly = 0;
3838 }
3839
3840 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3841 request->frame_number = internalFrameNumber;
3842 processCaptureRequest(request, internallyRequestedStreams);
3843
3844 /* Capture 2X frame*/
3845 modified_meta = modified_settings;
3846 expCompensation = GB_HDR_2X_STEP_EV;
3847 aeLock = 1;
3848 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
3849 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
3850 modified_settings = modified_meta.release();
3851 request->settings = modified_settings;
3852
3853 itr = internallyRequestedStreams.begin();
3854 if (itr == internallyRequestedStreams.end()) {
3855 ALOGE("Error Internally Requested Stream list is empty");
3856 assert(0);
3857 } else {
3858 itr->need_metadata = 0;
3859 itr->meteringOnly = 1;
3860 }
3861 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3862 request->frame_number = internalFrameNumber;
3863 processCaptureRequest(request, internallyRequestedStreams);
3864
3865 itr = internallyRequestedStreams.begin();
3866 if (itr == internallyRequestedStreams.end()) {
3867 ALOGE("Error Internally Requested Stream list is empty");
3868 assert(0);
3869 } else {
3870 itr->need_metadata = 1;
3871 itr->meteringOnly = 0;
3872 }
3873
3874 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
3875 request->frame_number = internalFrameNumber;
3876 processCaptureRequest(request, internallyRequestedStreams);
3877
3878
3879 /* Capture 2X on original streaming config*/
3880 internallyRequestedStreams.clear();
3881
3882 /* Restore original settings pointer */
3883 request->settings = original_settings;
3884 } else {
3885 uint32_t internalFrameNumber;
3886 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
3887 request->frame_number = internalFrameNumber;
3888 return processCaptureRequest(request, internallyRequestedStreams);
3889 }
3890
3891 return NO_ERROR;
3892}
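/* Rough expansion performed by orchestrateRequest() above for one framework HDR
 * snapshot request (illustration only; blobMeteringOnly / blobWithMetadata are
 * hypothetical names for the internallyRequestedStreams list in its two
 * configurations, and the EV values come from GB_HDR_HALF_STEP_EV and
 * GB_HDR_2X_STEP_EV with AE locked throughout):
 *
 *     processCaptureRequest(req, blobMeteringOnly);  // settle at half-step EV
 *     processCaptureRequest(req, {});                // original streams; the only
 *                                                    // request mapped to the
 *                                                    // framework frame number
 *     processCaptureRequest(req, blobMeteringOnly);  // settle at 0 EV
 *     processCaptureRequest(req, blobWithMetadata);  // 0 EV internal BLOB capture
 *     processCaptureRequest(req, blobMeteringOnly);  // settle at 2x EV
 *     processCaptureRequest(req, blobWithMetadata);  // 2x EV internal BLOB capture
 *
 * All other internal submissions use generated frame numbers that map to
 * EMPTY_FRAMEWORK_FRAME_NUMBER, so their results are dropped in orchestrateResult(). */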
3893
3894/*===========================================================================
3895 * FUNCTION : orchestrateResult
3896 *
3897 * DESCRIPTION: Orchestrates a capture result to camera service
3898 *
3899 * PARAMETERS :
3900 * @result : capture result to be delivered to the camera service
3901 *
3902 * RETURN :
3903 *
3904 *==========================================================================*/
3905void QCamera3HardwareInterface::orchestrateResult(
3906 camera3_capture_result_t *result)
3907{
3908 uint32_t frameworkFrameNumber;
3909 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3910 frameworkFrameNumber);
3911 if (rc != NO_ERROR) {
3912 LOGE("Cannot find translated frameworkFrameNumber");
3913 assert(0);
3914 } else {
3915 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3916 LOGD("CAM_DEBUG Internal Request drop the result");
3917 } else {
3918 result->frame_number = frameworkFrameNumber;
3919 mCallbackOps->process_capture_result(mCallbackOps, result);
3920 }
3921 }
3922}
3923
3924/*===========================================================================
3925 * FUNCTION : orchestrateNotify
3926 *
3927 * DESCRIPTION: Orchestrates a notify to camera service
3928 *
3929 * PARAMETERS :
3930 * @notify_msg : notify message to be delivered to the camera service
3931 *
3932 * RETURN :
3933 *
3934 *==========================================================================*/
3935void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3936{
3937 uint32_t frameworkFrameNumber;
3938 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3939 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3940 frameworkFrameNumber);
3941 if (rc != NO_ERROR) {
3942 LOGE("Cannot find translated frameworkFrameNumber");
3943 assert(0);
3944 } else {
3945 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3946 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3947 } else {
3948 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3949 mCallbackOps->notify(mCallbackOps, notify_msg);
3950 }
3951 }
3952}
3953
3954/*===========================================================================
3955 * FUNCTION : FrameNumberRegistry
3956 *
3957 * DESCRIPTION: Constructor
3958 *
3959 * PARAMETERS :
3960 *
3961 * RETURN :
3962 *
3963 *==========================================================================*/
3964FrameNumberRegistry::FrameNumberRegistry()
3965{
3966 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3967}
3968
3969/*===========================================================================
3970 * FUNCTION : ~FrameNumberRegistry
3971 *
3972 * DESCRIPTION: Destructor
3973 *
3974 * PARAMETERS :
3975 *
3976 * RETURN :
3977 *
3978 *==========================================================================*/
3979FrameNumberRegistry::~FrameNumberRegistry()
3980{
3981}
3982
3983/*===========================================================================
3984 * FUNCTION : purgeOldEntriesLocked
3985 *
3986 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
3987 *
3988 * PARAMETERS :
3989 *
3990 * RETURN : NONE
3991 *
3992 *==========================================================================*/
3993void FrameNumberRegistry::purgeOldEntriesLocked()
3994{
3995 while (_register.begin() != _register.end()) {
3996 auto itr = _register.begin();
3997 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
3998 _register.erase(itr);
3999 } else {
4000 return;
4001 }
4002 }
4003}
4004
4005/*===========================================================================
4006 * FUNCTION : allocStoreInternalFrameNumber
4007 *
4008 * DESCRIPTION: Method to record a framework frame number and associate a newly
4009 * generated internal frame number with it
4010 *
4011 * PARAMETERS :
4012 * @fFrameNumber: Identifier given by framework
4013 * @internalFN : Output parameter which will have the newly generated internal
4014 * entry
4015 *
4016 * RETURN : Error code
4017 *
4018 *==========================================================================*/
4019int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4020 uint32_t &internalFrameNumber)
4021{
4022 Mutex::Autolock lock(mRegistryLock);
4023 internalFrameNumber = _nextFreeInternalNumber++;
4024 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4025 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4026 purgeOldEntriesLocked();
4027 return NO_ERROR;
4028}
4029
4030/*===========================================================================
4031 * FUNCTION : generateStoreInternalFrameNumber
4032 *
4033 * DESCRIPTION: Method to generate a new internal request number independent
4034 * of any association with framework requests
4035 *
4036 * PARAMETERS :
4037 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4038 *
4039 *
4040 * RETURN : Error code
4041 *
4042 *==========================================================================*/
4043int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4044{
4045 Mutex::Autolock lock(mRegistryLock);
4046 internalFrameNumber = _nextFreeInternalNumber++;
4047 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4048 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4049 purgeOldEntriesLocked();
4050 return NO_ERROR;
4051}
4052
4053/*===========================================================================
4054 * FUNCTION : getFrameworkFrameNumber
4055 *
4056 * DESCRIPTION: Method to query the framework frame number given an internal #
4057 *
4058 * PARAMETERS :
4059 * @internalFrame#: Internal reference
4060 * @frameworkframenumber: Output parameter holding framework frame entry
4061 *
4062 * RETURN : Error code
4063 *
4064 *==========================================================================*/
4065int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4066 uint32_t &frameworkFrameNumber)
4067{
4068 Mutex::Autolock lock(mRegistryLock);
4069 auto itr = _register.find(internalFrameNumber);
4070 if (itr == _register.end()) {
4071 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
4072 return -ENOENT;
4073 }
4074
4075 frameworkFrameNumber = itr->second;
4076 purgeOldEntriesLocked();
4077 return NO_ERROR;
4078}
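/* Minimal usage sketch of FrameNumberRegistry (illustration only; `db` is a
 * hypothetical instance name standing in for _orchestrationDb):
 *
 *     uint32_t internalFn;
 *     db.allocStoreInternalFrameNumber(request->frame_number, internalFn); // framework-visible
 *     db.generateStoreInternalFrameNumber(internalFn);                     // HAL-internal only
 *
 *     uint32_t frameworkFn;
 *     if (db.getFrameworkFrameNumber(internalFn, frameworkFn) == NO_ERROR &&
 *             frameworkFn != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *         // deliver the result/notify upstream using frameworkFn
 *     }
 *
 * Entries older than FRAME_REGISTER_LRU_SIZE internal numbers are purged lazily
 * on every alloc/generate/lookup call. */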
Thierry Strudel3d639192016-09-09 11:52:26 -07004079
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004080status_t QCamera3HardwareInterface::fillPbStreamConfig(
4081 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4082 QCamera3Channel *channel, uint32_t streamIndex) {
4083 if (config == nullptr) {
4084 LOGE("%s: config is null", __FUNCTION__);
4085 return BAD_VALUE;
4086 }
4087
4088 if (channel == nullptr) {
4089 LOGE("%s: channel is null", __FUNCTION__);
4090 return BAD_VALUE;
4091 }
4092
4093 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4094 if (stream == nullptr) {
4095 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4096 return NAME_NOT_FOUND;
4097 }
4098
4099 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4100 if (streamInfo == nullptr) {
4101 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4102 return NAME_NOT_FOUND;
4103 }
4104
4105 config->id = pbStreamId;
4106 config->image.width = streamInfo->dim.width;
4107 config->image.height = streamInfo->dim.height;
4108 config->image.padding = 0;
4109 config->image.format = pbStreamFormat;
4110
4111 // Fill plane information.
4112 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4113 pbcamera::PlaneConfiguration plane;
4114 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4115 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4116 config->image.planes.push_back(plane);
4117 }
4118
4119 return OK;
4120}
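/* Hypothetical usage sketch of fillPbStreamConfig() (illustration only; kPbRawStreamId,
 * rawPbFormat and mRawChannel are placeholder names, not identifiers from this file):
 *
 *     pbcamera::StreamConfiguration config;
 *     status_t rc = fillPbStreamConfig(&config, kPbRawStreamId, rawPbFormat,
 *             mRawChannel, 0);
 *     if (rc != OK) {
 *         LOGE("Failed to fill HDR+ stream config: %d", rc);
 *     }
 */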
4121
Thierry Strudel3d639192016-09-09 11:52:26 -07004122/*===========================================================================
4123 * FUNCTION : processCaptureRequest
4124 *
4125 * DESCRIPTION: process a capture request from camera service
4126 *
4127 * PARAMETERS :
4128 * @request : request from framework to process
4129 *
4130 * RETURN :
4131 *
4132 *==========================================================================*/
4133int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004134 camera3_capture_request_t *request,
4135 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004136{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004137 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004138 int rc = NO_ERROR;
4139 int32_t request_id;
4140 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004141 bool isVidBufRequested = false;
4142 camera3_stream_buffer_t *pInputBuffer = NULL;
4143
4144 pthread_mutex_lock(&mMutex);
4145
4146 // Validate current state
4147 switch (mState) {
4148 case CONFIGURED:
4149 case STARTED:
4150 /* valid state */
4151 break;
4152
4153 case ERROR:
4154 pthread_mutex_unlock(&mMutex);
4155 handleCameraDeviceError();
4156 return -ENODEV;
4157
4158 default:
4159 LOGE("Invalid state %d", mState);
4160 pthread_mutex_unlock(&mMutex);
4161 return -ENODEV;
4162 }
4163
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004164 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004165 if (rc != NO_ERROR) {
4166 LOGE("incoming request is not valid");
4167 pthread_mutex_unlock(&mMutex);
4168 return rc;
4169 }
4170
4171 meta = request->settings;
4172
4173 // For first capture request, send capture intent, and
4174 // stream on all streams
4175 if (mState == CONFIGURED) {
4176 // send an unconfigure to the backend so that the isp
4177 // resources are deallocated
4178 if (!mFirstConfiguration) {
4179 cam_stream_size_info_t stream_config_info;
4180 int32_t hal_version = CAM_HAL_V3;
4181 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4182 stream_config_info.buffer_info.min_buffers =
4183 MIN_INFLIGHT_REQUESTS;
4184 stream_config_info.buffer_info.max_buffers =
4185 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4186 clear_metadata_buffer(mParameters);
4187 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4188 CAM_INTF_PARM_HAL_VERSION, hal_version);
4189 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4190 CAM_INTF_META_STREAM_INFO, stream_config_info);
4191 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4192 mParameters);
4193 if (rc < 0) {
4194 LOGE("set_parms for unconfigure failed");
4195 pthread_mutex_unlock(&mMutex);
4196 return rc;
4197 }
4198 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004199 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004200 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004201 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004202 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004203 property_get("persist.camera.is_type", is_type_value, "4");
4204 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4205 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4206 property_get("persist.camera.is_type_preview", is_type_value, "4");
4207 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4208 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004209
4210 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4211 int32_t hal_version = CAM_HAL_V3;
4212 uint8_t captureIntent =
4213 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4214 mCaptureIntent = captureIntent;
4215 clear_metadata_buffer(mParameters);
4216 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4217 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4218 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004219 if (mFirstConfiguration) {
4220 // configure instant AEC
4221 // Instant AEC is a session based parameter and it is needed only
4222 // once per complete session after open camera.
4223 // i.e. This is set only once for the first capture request, after open camera.
4224 setInstantAEC(meta);
4225 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004226 uint8_t fwkVideoStabMode=0;
4227 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4228 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4229 }
4230
4231 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4232 // turn it on for video/preview
4233 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4234 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004235 int32_t vsMode;
4236 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4238 rc = BAD_VALUE;
4239 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004240 LOGD("setEis %d", setEis);
4241 bool eis3Supported = false;
4242 size_t count = IS_TYPE_MAX;
4243 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4244 for (size_t i = 0; i < count; i++) {
4245 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4246 eis3Supported = true;
4247 break;
4248 }
4249 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004250
4251 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004252 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004253 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4254 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004255 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4256 is_type = isTypePreview;
4257 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4258 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4259 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004260 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004261 } else {
4262 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004263 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004264 } else {
4265 is_type = IS_TYPE_NONE;
4266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004267 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004268 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004269 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4270 }
4271 }
4272
4273 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4274 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4275
4276 int32_t tintless_value = 1;
4277 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4278 CAM_INTF_PARM_TINTLESS, tintless_value);
4279 //Disable CDS for HFR mode or if DIS/EIS is on.
4280 //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
4281 //after every configure_stream
4282 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4283 (m_bIsVideo)) {
4284 int32_t cds = CAM_CDS_MODE_OFF;
4285 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4286 CAM_INTF_PARM_CDS_MODE, cds))
4287 LOGE("Failed to disable CDS for HFR mode");
4288
4289 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004290
4291 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4292 uint8_t* use_av_timer = NULL;
4293
4294 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004295 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004296 use_av_timer = &m_debug_avtimer;
4297 }
4298 else{
4299 use_av_timer =
4300 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004301 if (use_av_timer) {
4302 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4303 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004304 }
4305
4306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4307 rc = BAD_VALUE;
4308 }
4309 }
4310
Thierry Strudel3d639192016-09-09 11:52:26 -07004311 setMobicat();
4312
4313 /* Set fps and hfr mode while sending meta stream info so that sensor
4314 * can configure appropriate streaming mode */
4315 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004316 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4317 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
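// Default in-flight request limits; they are adjusted below for 60fps / video
// recording and for HFR batch mode, based on the requested AE target FPS range.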
Thierry Strudel3d639192016-09-09 11:52:26 -07004318 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4319 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004320 if (rc == NO_ERROR) {
4321 int32_t max_fps =
4322 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004323 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004324 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4325 }
4326 /* For HFR, more buffers are dequeued upfront to improve the performance */
4327 if (mBatchSize) {
4328 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4329 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4330 }
4331 }
4332 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004333 LOGE("setHalFpsRange failed");
4334 }
4335 }
4336 if (meta.exists(ANDROID_CONTROL_MODE)) {
4337 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4338 rc = extractSceneMode(meta, metaMode, mParameters);
4339 if (rc != NO_ERROR) {
4340 LOGE("extractSceneMode failed");
4341 }
4342 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004343 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
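// mBatchedStreamsArray accumulates the stream IDs requested across a whole HFR batch
// so that a single set_parms call per batch can cover every stream; it is rebuilt for
// each batch further below.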
Thierry Strudel3d639192016-09-09 11:52:26 -07004344
Thierry Strudel04e026f2016-10-10 11:27:36 -07004345 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4346 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4347 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4348 rc = setVideoHdrMode(mParameters, vhdr);
4349 if (rc != NO_ERROR) {
4350 LOGE("setVideoHDR is failed");
4351 }
4352 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004353
Thierry Strudel3d639192016-09-09 11:52:26 -07004354 //TODO: validate the arguments, HSV scenemode should have only the
4355 //advertised fps ranges
4356
4357 /*set the capture intent, hal version, tintless, stream info,
4358 *and DIS enable parameters to the backend*/
4359 LOGD("set_parms META_STREAM_INFO " );
4360 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4361 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004362 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004363 mStreamConfigInfo.type[i],
4364 mStreamConfigInfo.stream_sizes[i].width,
4365 mStreamConfigInfo.stream_sizes[i].height,
4366 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004367 mStreamConfigInfo.format[i],
4368 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004369 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004370
Thierry Strudel3d639192016-09-09 11:52:26 -07004371 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4372 mParameters);
4373 if (rc < 0) {
4374 LOGE("set_parms failed for hal version, stream info");
4375 }
4376
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004377 cam_sensor_mode_info_t sensor_mode_info;
4378 memset(&sensor_mode_info, 0, sizeof(sensor_mode_info));
4379 rc = getSensorModeInfo(sensor_mode_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07004380 if (rc != NO_ERROR) {
4381 LOGE("Failed to get sensor output size");
4382 pthread_mutex_unlock(&mMutex);
4383 goto error_exit;
4384 }
4385
4386 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4387 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004388 sensor_mode_info.active_array_size.width,
4389 sensor_mode_info.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004390
4391 /* Set batchmode before initializing channel. Since registerBuffer
4392 * internally initializes some of the channels, better set batchmode
4393 * even before first register buffer */
4394 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4395 it != mStreamInfo.end(); it++) {
4396 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4397 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4398 && mBatchSize) {
4399 rc = channel->setBatchSize(mBatchSize);
4400 //Disable per frame map unmap for HFR/batchmode case
4401 rc |= channel->setPerFrameMapUnmap(false);
4402 if (NO_ERROR != rc) {
4403 LOGE("Channel init failed %d", rc);
4404 pthread_mutex_unlock(&mMutex);
4405 goto error_exit;
4406 }
4407 }
4408 }
4409
4410 //First initialize all streams
4411 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4412 it != mStreamInfo.end(); it++) {
4413 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4414 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4415 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004416 setEis) {
4417 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4418 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4419 is_type = mStreamConfigInfo.is_type[i];
4420 break;
4421 }
4422 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004423 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004424 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004425 rc = channel->initialize(IS_TYPE_NONE);
4426 }
4427 if (NO_ERROR != rc) {
4428 LOGE("Channel initialization failed %d", rc);
4429 pthread_mutex_unlock(&mMutex);
4430 goto error_exit;
4431 }
4432 }
4433
4434 if (mRawDumpChannel) {
4435 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4436 if (rc != NO_ERROR) {
4437 LOGE("Error: Raw Dump Channel init failed");
4438 pthread_mutex_unlock(&mMutex);
4439 goto error_exit;
4440 }
4441 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004442 if (mHdrPlusRawSrcChannel) {
4443 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4444 if (rc != NO_ERROR) {
4445 LOGE("Error: HDR+ RAW Source Channel init failed");
4446 pthread_mutex_unlock(&mMutex);
4447 goto error_exit;
4448 }
4449 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004450 if (mSupportChannel) {
4451 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4452 if (rc < 0) {
4453 LOGE("Support channel initialization failed");
4454 pthread_mutex_unlock(&mMutex);
4455 goto error_exit;
4456 }
4457 }
4458 if (mAnalysisChannel) {
4459 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4460 if (rc < 0) {
4461 LOGE("Analysis channel initialization failed");
4462 pthread_mutex_unlock(&mMutex);
4463 goto error_exit;
4464 }
4465 }
4466 if (mDummyBatchChannel) {
4467 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4468 if (rc < 0) {
4469 LOGE("mDummyBatchChannel setBatchSize failed");
4470 pthread_mutex_unlock(&mMutex);
4471 goto error_exit;
4472 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004473 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004474 if (rc < 0) {
4475 LOGE("mDummyBatchChannel initialization failed");
4476 pthread_mutex_unlock(&mMutex);
4477 goto error_exit;
4478 }
4479 }
4480
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004481 if (mHdrPlusClient != nullptr) {
4482 pbcamera::InputConfiguration inputConfig;
4483 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
4484
4485 // Configure HDR+ client streams.
4486 // Get input config.
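// When an HDR+ RAW source channel exists, the HAL hands RAW10 buffers to the
// HDR+ client; otherwise the sensor MIPI output feeds Easel directly and only
// the sensor mode geometry and pixel clock need to be described.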
4487 if (mHdrPlusRawSrcChannel) {
4488 // HDR+ input buffers will be provided by HAL.
4489 rc = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
4490 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
4491 if (rc != OK) {
4492 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream.",
4493 __FUNCTION__);
4494 pthread_mutex_unlock(&mMutex);
4495 goto error_exit;
4496 }
4497
4498 inputConfig.isSensorInput = false;
4499 } else {
4500 // Sensor MIPI will send data to Easel.
4501 inputConfig.isSensorInput = true;
4502 inputConfig.sensorMode.pixelArrayWidth =
4503 sensor_mode_info.pixel_array_size.width;
4504 inputConfig.sensorMode.pixelArrayHeight =
4505 sensor_mode_info.pixel_array_size.height;
4506 inputConfig.sensorMode.activeArrayWidth =
4507 sensor_mode_info.active_array_size.width;
4508 inputConfig.sensorMode.activeArrayHeight =
4509 sensor_mode_info.active_array_size.height;
4510 inputConfig.sensorMode.outputPixelClkHz =
4511 sensor_mode_info.op_pixel_clk;
4512 }
4513
4514 // Get output configurations.
4515 // Easel may need to output RAW16 buffers if mRawChannel was created.
4516 if (mRawChannel != nullptr) {
4517 pbcamera::StreamConfiguration outputConfig;
4518 rc = fillPbStreamConfig(&outputConfig, kPbRaw16OutputStreamId,
4519 HAL_PIXEL_FORMAT_RAW16, mRawChannel, /*stream index*/0);
4520 if (rc != OK) {
4521 LOGE("%s: Failed to get fill stream config for raw stream.", __FUNCTION__);
4522 pthread_mutex_unlock(&mMutex);
4523 goto error_exit;
4524 }
4525 outputStreamConfigs.push_back(outputConfig);
4526 }
4527
4528 // Easel may need to output YUV output buffers if mPictureChannel was created.
4529 if (mPictureChannel != nullptr) {
4530 pbcamera::StreamConfiguration outputConfig;
4531 rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
4532 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
4533 if (rc != OK) {
4534 LOGE("%s: Failed to get fill stream config for YUV stream.", __FUNCTION__);
4535 pthread_mutex_unlock(&mMutex);
4536 goto error_exit;
4537 }
4538 outputStreamConfigs.push_back(outputConfig);
4539 }
4540
4541 // TODO: consider other channels for YUV output buffers.
4542
4543 rc = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
4544 if (rc != OK) {
4545 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
4546 strerror(-rc), rc);
4547 pthread_mutex_unlock(&mMutex);
4548 goto error_exit;
4549 }
4550 }
4551
Thierry Strudel3d639192016-09-09 11:52:26 -07004552 // Set bundle info
4553 rc = setBundleInfo();
4554 if (rc < 0) {
4555 LOGE("setBundleInfo failed %d", rc);
4556 pthread_mutex_unlock(&mMutex);
4557 goto error_exit;
4558 }
4559
4560 //update settings from app here
4561 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4562 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4563 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4564 }
4565 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4566 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4567 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4568 }
4569 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4570 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4571 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4572
4573 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4574 (mLinkedCameraId != mCameraId) ) {
4575 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4576 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004577 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004578 goto error_exit;
4579 }
4580 }
4581
4582 // add bundle related cameras
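// The dual camera bundle command tells the backend which sensor acts as primary
// (bayer) vs auxiliary (mono), links the two sessions through the related sensor
// session id, and sets how 3A is synchronized between them (sync_3a_mode).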
4583 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4584 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004585 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4586 &m_pDualCamCmdPtr->bundle_info;
4587 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004588 if (mIsDeviceLinked)
4589 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4590 else
4591 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4592
4593 pthread_mutex_lock(&gCamLock);
4594
4595 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4596 LOGE("Dualcam: Invalid Session Id ");
4597 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004598 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004599 goto error_exit;
4600 }
4601
4602 if (mIsMainCamera == 1) {
4603 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4604 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004605 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004606 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004607 // related session id should be session id of linked session
4608 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4609 } else {
4610 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4611 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004612 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004613 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004614 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4615 }
4616 pthread_mutex_unlock(&gCamLock);
4617
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004618 rc = mCameraHandle->ops->set_dual_cam_cmd(
4619 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004620 if (rc < 0) {
4621 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004622 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004623 goto error_exit;
4624 }
4625 }
4626
4627 //Then start them.
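//Start order: metadata channel first (results depend on it), then analysis/support
//channels, the processing channels requested by the framework, raw dump and HDR+ RAW
//channels if present, and finally the bundled channel handle.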
4628 LOGH("Start META Channel");
4629 rc = mMetadataChannel->start();
4630 if (rc < 0) {
4631 LOGE("META channel start failed");
4632 pthread_mutex_unlock(&mMutex);
4633 goto error_exit;
4634 }
4635
4636 if (mAnalysisChannel) {
4637 rc = mAnalysisChannel->start();
4638 if (rc < 0) {
4639 LOGE("Analysis channel start failed");
4640 mMetadataChannel->stop();
4641 pthread_mutex_unlock(&mMutex);
4642 goto error_exit;
4643 }
4644 }
4645
4646 if (mSupportChannel) {
4647 rc = mSupportChannel->start();
4648 if (rc < 0) {
4649 LOGE("Support channel start failed");
4650 mMetadataChannel->stop();
4651 /* Although support and analysis are mutually exclusive today
4652 adding it in any case for future-proofing */
4653 if (mAnalysisChannel) {
4654 mAnalysisChannel->stop();
4655 }
4656 pthread_mutex_unlock(&mMutex);
4657 goto error_exit;
4658 }
4659 }
4660 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4661 it != mStreamInfo.end(); it++) {
4662 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4663 LOGH("Start Processing Channel mask=%d",
4664 channel->getStreamTypeMask());
4665 rc = channel->start();
4666 if (rc < 0) {
4667 LOGE("channel start failed");
4668 pthread_mutex_unlock(&mMutex);
4669 goto error_exit;
4670 }
4671 }
4672
4673 if (mRawDumpChannel) {
4674 LOGD("Starting raw dump stream");
4675 rc = mRawDumpChannel->start();
4676 if (rc != NO_ERROR) {
4677 LOGE("Error Starting Raw Dump Channel");
4678 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4679 it != mStreamInfo.end(); it++) {
4680 QCamera3Channel *channel =
4681 (QCamera3Channel *)(*it)->stream->priv;
4682 LOGH("Stopping Processing Channel mask=%d",
4683 channel->getStreamTypeMask());
4684 channel->stop();
4685 }
4686 if (mSupportChannel)
4687 mSupportChannel->stop();
4688 if (mAnalysisChannel) {
4689 mAnalysisChannel->stop();
4690 }
4691 mMetadataChannel->stop();
4692 pthread_mutex_unlock(&mMutex);
4693 goto error_exit;
4694 }
4695 }
4696
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004697 if (mHdrPlusRawSrcChannel) {
4698 LOGD("Starting HDR+ RAW stream");
4699 rc = mHdrPlusRawSrcChannel->start();
4700 if (rc != NO_ERROR) {
4701 LOGE("Error Starting HDR+ RAW Channel");
4702 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4703 it != mStreamInfo.end(); it++) {
4704 QCamera3Channel *channel =
4705 (QCamera3Channel *)(*it)->stream->priv;
4706 LOGH("Stopping Processing Channel mask=%d",
4707 channel->getStreamTypeMask());
4708 channel->stop();
4709 }
4710 if (mSupportChannel)
4711 mSupportChannel->stop();
4712 if (mAnalysisChannel) {
4713 mAnalysisChannel->stop();
4714 }
4715 if (mRawDumpChannel) {
4716 mRawDumpChannel->stop();
4717 }
4718 mMetadataChannel->stop();
4719 pthread_mutex_unlock(&mMutex);
4720 goto error_exit;
4721 }
4722 }
4723
Thierry Strudel3d639192016-09-09 11:52:26 -07004724 if (mChannelHandle) {
4725
4726 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4727 mChannelHandle);
4728 if (rc != NO_ERROR) {
4729 LOGE("start_channel failed %d", rc);
4730 pthread_mutex_unlock(&mMutex);
4731 goto error_exit;
4732 }
4733 }
4734
4735 goto no_error;
4736error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004737 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004738 return rc;
4739no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004740 mWokenUpByDaemon = false;
4741 mPendingLiveRequest = 0;
4742 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004743 }
4744
4745 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004746 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004747
4748 if (mFlushPerf) {
4749 //we cannot accept any requests during flush
4750 LOGE("process_capture_request cannot proceed during flush");
4751 pthread_mutex_unlock(&mMutex);
4752 return NO_ERROR; //should return an error
4753 }
4754
4755 if (meta.exists(ANDROID_REQUEST_ID)) {
4756 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4757 mCurrentRequestId = request_id;
4758 LOGD("Received request with id: %d", request_id);
4759 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4760 LOGE("Unable to find request id field, \
4761 & no previous id available");
4762 pthread_mutex_unlock(&mMutex);
4763 return NAME_NOT_FOUND;
4764 } else {
4765 LOGD("Re-using old request id");
4766 request_id = mCurrentRequestId;
4767 }
4768
4769 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4770 request->num_output_buffers,
4771 request->input_buffer,
4772 frameNumber);
4773 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004774 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004775 int blob_request = 0;
4776 uint32_t snapshotStreamId = 0;
4777 for (size_t i = 0; i < request->num_output_buffers; i++) {
4778 const camera3_stream_buffer_t& output = request->output_buffers[i];
4779 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4780
4781 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004782 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 blob_request = 1;
4784 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4785 }
4786
4787 if (output.acquire_fence != -1) {
4788 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4789 close(output.acquire_fence);
4790 if (rc != OK) {
4791 LOGE("sync wait failed %d", rc);
4792 pthread_mutex_unlock(&mMutex);
4793 return rc;
4794 }
4795 }
4796
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004797 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004799
4800 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4801 isVidBufRequested = true;
4802 }
4803 }
4804
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004805 //FIXME: Add checks to ensure no dups in validateCaptureRequest
4806 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4807 itr++) {
4808 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4809 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4810 channel->getStreamID(channel->getStreamTypeMask());
4811
4812 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4813 isVidBufRequested = true;
4814 }
4815 }
4816
Thierry Strudel3d639192016-09-09 11:52:26 -07004817 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004818 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004819 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 }
4821 if (blob_request && mRawDumpChannel) {
4822 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004823 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004824 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004825 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004826 }
4827
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004828 if (mHdrPlusRawSrcChannel) {
4829 streamsArray.stream_request[streamsArray.num_streams].streamID =
4830 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
4831 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4832 }
4833
Thierry Strudel3d639192016-09-09 11:52:26 -07004834 if(request->input_buffer == NULL) {
4835 /* Parse the settings:
4836 * - For every request in NORMAL MODE
4837 * - For every request in HFR mode during preview only case
4838 * - For first request of every batch in HFR mode during video
4839 * recording. In batchmode the same settings except frame number is
4840 * repeated in each request of the batch.
4841 */
4842 if (!mBatchSize ||
4843 (mBatchSize && !isVidBufRequested) ||
4844 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004845 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 if (rc < 0) {
4847 LOGE("fail to set frame parameters");
4848 pthread_mutex_unlock(&mMutex);
4849 return rc;
4850 }
4851 }
4852 /* For batchMode HFR, setFrameParameters is not called for every
4853 * request. But only frame number of the latest request is parsed.
4854 * Keep track of first and last frame numbers in a batch so that
4855 * metadata for the frame numbers of batch can be duplicated in
4856 * handleBatchMetadata */
4857 if (mBatchSize) {
4858 if (!mToBeQueuedVidBufs) {
4859 //start of the batch
4860 mFirstFrameNumberInBatch = request->frame_number;
4861 }
4862 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4863 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4864 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004865 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 return BAD_VALUE;
4867 }
4868 }
4869 if (mNeedSensorRestart) {
4870 /* Unlock the mutex as restartSensor waits on the channels to be
4871 * stopped, which in turn calls stream callback functions -
4872 * handleBufferWithLock and handleMetadataWithLock */
4873 pthread_mutex_unlock(&mMutex);
4874 rc = dynamicUpdateMetaStreamInfo();
4875 if (rc != NO_ERROR) {
4876 LOGE("Restarting the sensor failed");
4877 return BAD_VALUE;
4878 }
4879 mNeedSensorRestart = false;
4880 pthread_mutex_lock(&mMutex);
4881 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004882 if(mResetInstantAEC) {
4883 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4884 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4885 mResetInstantAEC = false;
4886 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004887 } else {
4888
4889 if (request->input_buffer->acquire_fence != -1) {
4890 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
4891 close(request->input_buffer->acquire_fence);
4892 if (rc != OK) {
4893 LOGE("input buffer sync wait failed %d", rc);
4894 pthread_mutex_unlock(&mMutex);
4895 return rc;
4896 }
4897 }
4898 }
4899
4900 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
4901 mLastCustIntentFrmNum = frameNumber;
4902 }
4903 /* Update pending request list and pending buffers map */
4904 PendingRequestInfo pendingRequest;
4905 pendingRequestIterator latestRequest;
4906 pendingRequest.frame_number = frameNumber;
4907 pendingRequest.num_buffers = request->num_output_buffers;
4908 pendingRequest.request_id = request_id;
4909 pendingRequest.blob_request = blob_request;
4910 pendingRequest.timestamp = 0;
4911 pendingRequest.bUrgentReceived = 0;
4912 if (request->input_buffer) {
4913 pendingRequest.input_buffer =
4914 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
4915 *(pendingRequest.input_buffer) = *(request->input_buffer);
4916 pInputBuffer = pendingRequest.input_buffer;
4917 } else {
4918 pendingRequest.input_buffer = NULL;
4919 pInputBuffer = NULL;
4920 }
4921
4922 pendingRequest.pipeline_depth = 0;
4923 pendingRequest.partial_result_cnt = 0;
4924 extractJpegMetadata(mCurJpegMeta, request);
4925 pendingRequest.jpegMetadata = mCurJpegMeta;
4926 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
4927 pendingRequest.shutter_notified = false;
4928
4929 //extract capture intent
4930 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4931 mCaptureIntent =
4932 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4933 }
4934 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08004935 /* DevCamDebug metadata processCaptureRequest */
4936 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
4937 mDevCamDebugMetaEnable =
4938 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
4939 }
4940 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
4941 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07004942
4943 //extract CAC info
4944 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4945 mCacMode =
4946 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4947 }
4948 pendingRequest.fwkCacMode = mCacMode;
4949
4950 PendingBuffersInRequest bufsForCurRequest;
4951 bufsForCurRequest.frame_number = frameNumber;
4952 // Mark current timestamp for the new request
4953 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
4954
4955 for (size_t i = 0; i < request->num_output_buffers; i++) {
4956 RequestedBufferInfo requestedBuf;
4957 memset(&requestedBuf, 0, sizeof(requestedBuf));
4958 requestedBuf.stream = request->output_buffers[i].stream;
4959 requestedBuf.buffer = NULL;
4960 pendingRequest.buffers.push_back(requestedBuf);
4961
4962 // Add to buffer handle the pending buffers list
4963 PendingBufferInfo bufferInfo;
4964 bufferInfo.buffer = request->output_buffers[i].buffer;
4965 bufferInfo.stream = request->output_buffers[i].stream;
4966 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
4967 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
4968 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
4969 frameNumber, bufferInfo.buffer,
4970 channel->getStreamTypeMask(), bufferInfo.stream->format);
4971 }
4972 // Add this request packet into mPendingBuffersMap
4973 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
4974 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
4975 mPendingBuffersMap.get_num_overall_buffers());
4976
4977 latestRequest = mPendingRequestsList.insert(
4978 mPendingRequestsList.end(), pendingRequest);
4979 if(mFlush) {
4980 LOGI("mFlush is true");
4981 pthread_mutex_unlock(&mMutex);
4982 return NO_ERROR;
4983 }
4984
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004985 int indexUsed;
Thierry Strudel3d639192016-09-09 11:52:26 -07004986 // Notify metadata channel we receive a request
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004987 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07004988
4989 if(request->input_buffer != NULL){
4990 LOGD("Input request, frame_number %d", frameNumber);
4991 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
4992 if (NO_ERROR != rc) {
4993 LOGE("fail to set reproc parameters");
4994 pthread_mutex_unlock(&mMutex);
4995 return rc;
4996 }
4997 }
4998
4999 // Call request on other streams
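// Dispatch per stream type: BLOB goes to the picture channel (and needs HAL metadata
// when it will be reprocessed), YUV may need metadata depending on the channel, and
// all other streams get a plain buffer request; the buffer index actually used is
// written back into streamsArray for the backend.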
5000 uint32_t streams_need_metadata = 0;
5001 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5002 for (size_t i = 0; i < request->num_output_buffers; i++) {
5003 const camera3_stream_buffer_t& output = request->output_buffers[i];
5004 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5005
5006 if (channel == NULL) {
5007 LOGW("invalid channel pointer for stream");
5008 continue;
5009 }
5010
5011 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5012 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5013 output.buffer, request->input_buffer, frameNumber);
5014 if(request->input_buffer != NULL){
5015 rc = channel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005016 pInputBuffer, &mReprocMeta, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 if (rc < 0) {
5018 LOGE("Fail to request on picture channel");
5019 pthread_mutex_unlock(&mMutex);
5020 return rc;
5021 }
5022 } else {
5023 LOGD("snapshot request with buffer %p, frame_number %d",
5024 output.buffer, frameNumber);
5025 if (!request->settings) {
5026 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005027 NULL, mPrevParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005028 } else {
5029 rc = channel->request(output.buffer, frameNumber,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005030 NULL, mParameters, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005031 }
5032 if (rc < 0) {
5033 LOGE("Fail to request on picture channel");
5034 pthread_mutex_unlock(&mMutex);
5035 return rc;
5036 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005037
5038 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5039 uint32_t j = 0;
5040 for (j = 0; j < streamsArray.num_streams; j++) {
5041 if (streamsArray.stream_request[j].streamID == streamId) {
5042 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5043 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5044 else
5045 streamsArray.stream_request[j].buf_index = indexUsed;
5046 break;
5047 }
5048 }
5049 if (j == streamsArray.num_streams) {
5050 LOGE("Did not find matching stream to update index");
5051 assert(0);
5052 }
5053
Thierry Strudel3d639192016-09-09 11:52:26 -07005054 pendingBufferIter->need_metadata = true;
5055 streams_need_metadata++;
5056 }
5057 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5058 bool needMetadata = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005059 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5060 rc = yuvChannel->request(output.buffer, frameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005061 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5062 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005063 if (rc < 0) {
5064 LOGE("Fail to request on YUV channel");
5065 pthread_mutex_unlock(&mMutex);
5066 return rc;
5067 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005068
5069 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5070 uint32_t j = 0;
5071 for (j = 0; j < streamsArray.num_streams; j++) {
5072 if (streamsArray.stream_request[j].streamID == streamId) {
5073 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5074 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5075 else
5076 streamsArray.stream_request[j].buf_index = indexUsed;
5077 break;
5078 }
5079 }
5080 if (j == streamsArray.num_streams) {
5081 LOGE("Did not find matching stream to update index");
5082 assert(0);
5083 }
5084
Thierry Strudel3d639192016-09-09 11:52:26 -07005085 pendingBufferIter->need_metadata = needMetadata;
5086 if (needMetadata)
5087 streams_need_metadata += 1;
5088 LOGD("calling YUV channel request, need_metadata is %d",
5089 needMetadata);
5090 } else {
5091 LOGD("request with buffer %p, frame_number %d",
5092 output.buffer, frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005093
5094 rc = channel->request(output.buffer, frameNumber, indexUsed);
5095
5096 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5097 uint32_t j = 0;
5098 for (j = 0; j < streamsArray.num_streams; j++) {
5099 if (streamsArray.stream_request[j].streamID == streamId) {
5100 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5101 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5102 else
5103 streamsArray.stream_request[j].buf_index = indexUsed;
5104 break;
5105 }
5106 }
5107 if (j == streamsArray.num_streams) {
5108 LOGE("Did not find matching stream to update index");
5109 assert(0);
5110 }
5111
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5113 && mBatchSize) {
5114 mToBeQueuedVidBufs++;
5115 if (mToBeQueuedVidBufs == mBatchSize) {
5116 channel->queueBatchBuf();
5117 }
5118 }
5119 if (rc < 0) {
5120 LOGE("request failed");
5121 pthread_mutex_unlock(&mMutex);
5122 return rc;
5123 }
5124 }
5125 pendingBufferIter++;
5126 }
5127
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005128 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5129 itr++) {
5130 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5131
5132 if (channel == NULL) {
5133 LOGE("invalid channel pointer for stream");
5134 assert(0);
5135 return BAD_VALUE;
5136 }
5137
5138 InternalRequest requestedStream;
5139 requestedStream = (*itr);
5140
5141
5142 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5143 LOGD("snapshot request internally input buffer %p, frame_number %d",
5144 request->input_buffer, frameNumber);
5145 if(request->input_buffer != NULL){
5146 rc = channel->request(NULL, frameNumber,
5147 pInputBuffer, &mReprocMeta, indexUsed, true, requestedStream.meteringOnly);
5148 if (rc < 0) {
5149 LOGE("Fail to request on picture channel");
5150 pthread_mutex_unlock(&mMutex);
5151 return rc;
5152 }
5153 } else {
5154 LOGD("snapshot request with frame_number %d", frameNumber);
5155 if (!request->settings) {
5156 rc = channel->request(NULL, frameNumber,
5157 NULL, mPrevParameters, indexUsed, true, requestedStream.meteringOnly);
5158 } else {
5159 rc = channel->request(NULL, frameNumber,
5160 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5161 }
5162 if (rc < 0) {
5163 LOGE("Fail to request on picture channel");
5164 pthread_mutex_unlock(&mMutex);
5165 return rc;
5166 }
5167
5168 if ((*itr).meteringOnly != 1) {
5169 requestedStream.need_metadata = 1;
5170 streams_need_metadata++;
5171 }
5172 }
5173
5174 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5175 uint32_t j = 0;
5176 for (j = 0; j < streamsArray.num_streams; j++) {
5177 if (streamsArray.stream_request[j].streamID == streamId) {
5178 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5179 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5180 else
5181 streamsArray.stream_request[j].buf_index = indexUsed;
5182 break;
5183 }
5184 }
5185 if (j == streamsArray.num_streams) {
5186 LOGE("Did not find matching stream to update index");
5187 assert(0);
5188 }
5189
5190 } else {
5191 LOGE("Internal requests not supported on this stream type");
5192 assert(0);
5193 return INVALID_OPERATION;
5194 }
5195 latestRequest->internalRequestList.push_back(requestedStream);
5196 }
5197
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 //If 2 streams have need_metadata set to true, fail the request, unless
5199 //we copy/reference count the metadata buffer
5200 if (streams_need_metadata > 1) {
5201 LOGE("not supporting request in which two streams requires"
5202 " 2 HAL metadata for reprocessing");
5203 pthread_mutex_unlock(&mMutex);
5204 return -EINVAL;
5205 }
5206
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005207 if (request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 /* Set the parameters to backend:
5209 * - For every request in NORMAL MODE
5210 * - For every request in HFR mode during preview only case
5211 * - Once every batch in HFR mode during video recording
5212 */
5213 if (!mBatchSize ||
5214 (mBatchSize && !isVidBufRequested) ||
5215 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5216 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5217 mBatchSize, isVidBufRequested,
5218 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005219
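// In batched HFR mode only one set_parms reaches the backend per batch, so the
// stream IDs gathered for this request are merged into mBatchedStreamsArray
// before being sent.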
5220 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5221 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5222 uint32_t m = 0;
5223 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5224 if (streamsArray.stream_request[k].streamID ==
5225 mBatchedStreamsArray.stream_request[m].streamID)
5226 break;
5227 }
5228 if (m == mBatchedStreamsArray.num_streams) {
5229 mBatchedStreamsArray.stream_request\
5230 [mBatchedStreamsArray.num_streams].streamID =
5231 streamsArray.stream_request[k].streamID;
5232 mBatchedStreamsArray.stream_request\
5233 [mBatchedStreamsArray.num_streams].buf_index =
5234 streamsArray.stream_request[k].buf_index;
5235 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5236 }
5237 }
5238 streamsArray = mBatchedStreamsArray;
5239 }
5240 /* Update stream id of all the requested buffers */
5241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
5242 LOGE("Failed to set stream type mask in the parameters");
5243 return BAD_VALUE;
5244 }
5245
Thierry Strudel3d639192016-09-09 11:52:26 -07005246 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5247 mParameters);
5248 if (rc < 0) {
5249 LOGE("set_parms failed");
5250 }
5251 /* reset to zero because the batch is queued */
5252 mToBeQueuedVidBufs = 0;
5253 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005254 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5255 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
5256 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5257 uint32_t m = 0;
5258 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5259 if (streamsArray.stream_request[k].streamID ==
5260 mBatchedStreamsArray.stream_request[m].streamID)
5261 break;
5262 }
5263 if (m == mBatchedStreamsArray.num_streams) {
5264 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].streamID =
5265 streamsArray.stream_request[k].streamID;
5266 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].buf_index =
5267 streamsArray.stream_request[k].buf_index;
5268 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5269 }
5270 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005271 }
5272 mPendingLiveRequest++;
5273 }
5274
5275 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5276
5277 mState = STARTED;
5278 // Added a timed condition wait
5279 struct timespec ts;
5280 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005281 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005282 if (rc < 0) {
5283 isValidTimeout = 0;
5284 LOGE("Error reading the real time clock!!");
5285 }
5286 else {
5287 // Set a 5 sec timeout for the request to be honored
5288 ts.tv_sec += 5;
5289 }
5290 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005291 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005292 (mState != ERROR) && (mState != DEINIT)) {
5293 if (!isValidTimeout) {
5294 LOGD("Blocking on conditional wait");
5295 pthread_cond_wait(&mRequestCond, &mMutex);
5296 }
5297 else {
5298 LOGD("Blocking on timed conditional wait");
5299 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5300 if (rc == ETIMEDOUT) {
5301 rc = -ENODEV;
5302 LOGE("Unblocked on timeout!!!!");
5303 break;
5304 }
5305 }
5306 LOGD("Unblocked");
5307 if (mWokenUpByDaemon) {
5308 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005309 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 break;
5311 }
5312 }
5313 pthread_mutex_unlock(&mMutex);
5314
5315 return rc;
5316}
5317
5318/*===========================================================================
5319 * FUNCTION : dump
5320 *
5321 * DESCRIPTION: Dump HAL3 state, pending requests and pending buffers to the
5322 * given file descriptor (triggered via dumpsys media.camera)
5323 * PARAMETERS :
5324 * @fd : file descriptor to write the dump to
5325 *
5326 * RETURN : None
5327 *==========================================================================*/
5328void QCamera3HardwareInterface::dump(int fd)
5329{
5330 pthread_mutex_lock(&mMutex);
5331 dprintf(fd, "\n Camera HAL3 information Begin \n");
5332
5333 dprintf(fd, "\nNumber of pending requests: %zu \n",
5334 mPendingRequestsList.size());
5335 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5336 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5337 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5338 for(pendingRequestIterator i = mPendingRequestsList.begin();
5339 i != mPendingRequestsList.end(); i++) {
5340 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5341 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5342 i->input_buffer);
5343 }
5344 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5345 mPendingBuffersMap.get_num_overall_buffers());
5346 dprintf(fd, "-------+------------------\n");
5347 dprintf(fd, " Frame | Stream type mask \n");
5348 dprintf(fd, "-------+------------------\n");
5349 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5350 for(auto &j : req.mPendingBufferList) {
5351 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5352 dprintf(fd, " %5d | %11d \n",
5353 req.frame_number, channel->getStreamTypeMask());
5354 }
5355 }
5356 dprintf(fd, "-------+------------------\n");
5357
5358 dprintf(fd, "\nPending frame drop list: %zu\n",
5359 mPendingFrameDropList.size());
5360 dprintf(fd, "-------+-----------\n");
5361 dprintf(fd, " Frame | Stream ID \n");
5362 dprintf(fd, "-------+-----------\n");
5363 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5364 i != mPendingFrameDropList.end(); i++) {
5365 dprintf(fd, " %5d | %9d \n",
5366 i->frame_number, i->stream_ID);
5367 }
5368 dprintf(fd, "-------+-----------\n");
5369
5370 dprintf(fd, "\n Camera HAL3 information End \n");
5371
5372 /* use dumpsys media.camera as trigger to send update debug level event */
5373 mUpdateDebugLevel = true;
5374 pthread_mutex_unlock(&mMutex);
5375 return;
5376}
5377
5378/*===========================================================================
5379 * FUNCTION : flush
5380 *
5381 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5382 * conditionally restarts channels
5383 *
5384 * PARAMETERS :
5385 * @ restartChannels: re-start all channels
5386 *
5387 *
5388 * RETURN :
5389 * 0 on success
5390 * Error code on failure
5391 *==========================================================================*/
5392int QCamera3HardwareInterface::flush(bool restartChannels)
5393{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005394 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005395 int32_t rc = NO_ERROR;
5396
5397 LOGD("Unblocking Process Capture Request");
5398 pthread_mutex_lock(&mMutex);
5399 mFlush = true;
5400 pthread_mutex_unlock(&mMutex);
5401
5402 rc = stopAllChannels();
5403 // unlink of dualcam
5404 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005405 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5406 &m_pDualCamCmdPtr->bundle_info;
5407 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005408 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5409 pthread_mutex_lock(&gCamLock);
5410
5411 if (mIsMainCamera == 1) {
5412 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5413 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005414 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 // related session id should be session id of linked session
5416 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5417 } else {
5418 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5419 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005420 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005421 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5422 }
5423 pthread_mutex_unlock(&gCamLock);
5424
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005425 rc = mCameraHandle->ops->set_dual_cam_cmd(
5426 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005427 if (rc < 0) {
5428 LOGE("Dualcam: Unlink failed, but still proceed to close");
5429 }
5430 }
5431
5432 if (rc < 0) {
5433 LOGE("stopAllChannels failed");
5434 return rc;
5435 }
5436 if (mChannelHandle) {
5437 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5438 mChannelHandle);
5439 }
5440
5441 // Reset bundle info
5442 rc = setBundleInfo();
5443 if (rc < 0) {
5444 LOGE("setBundleInfo failed %d", rc);
5445 return rc;
5446 }
5447
5448 // Mutex Lock
5449 pthread_mutex_lock(&mMutex);
5450
5451 // Unblock process_capture_request
5452 mPendingLiveRequest = 0;
5453 pthread_cond_signal(&mRequestCond);
5454
5455 rc = notifyErrorForPendingRequests();
5456 if (rc < 0) {
5457 LOGE("notifyErrorForPendingRequests failed");
5458 pthread_mutex_unlock(&mMutex);
5459 return rc;
5460 }
5461
5462 mFlush = false;
5463
5464 // Start the Streams/Channels
5465 if (restartChannels) {
5466 rc = startAllChannels();
5467 if (rc < 0) {
5468 LOGE("startAllChannels failed");
5469 pthread_mutex_unlock(&mMutex);
5470 return rc;
5471 }
5472 }
5473
5474 if (mChannelHandle) {
5475 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5476 mChannelHandle);
5477 if (rc < 0) {
5478 LOGE("start_channel failed");
5479 pthread_mutex_unlock(&mMutex);
5480 return rc;
5481 }
5482 }
5483
5484 pthread_mutex_unlock(&mMutex);
5485
5486 return 0;
5487}
5488
5489/*===========================================================================
5490 * FUNCTION : flushPerf
5491 *
5492 * DESCRIPTION: This is the performance optimization version of flush that does
5493 * not use stream off; instead it flushes the backend and waits for the pending buffers to return
5494 *
5495 * PARAMETERS :
5496 *
5497 *
5498 * RETURN : 0 : success
5499 * -EINVAL: input is malformed (device is not valid)
5500 * -ENODEV: if the device has encountered a serious error
5501 *==========================================================================*/
5502int QCamera3HardwareInterface::flushPerf()
5503{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005504 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005505 int32_t rc = 0;
5506 struct timespec timeout;
5507 bool timed_wait = false;
5508
5509 pthread_mutex_lock(&mMutex);
5510 mFlushPerf = true;
5511 mPendingBuffersMap.numPendingBufsAtFlush =
5512 mPendingBuffersMap.get_num_overall_buffers();
5513 LOGD("Calling flush. Wait for %d buffers to return",
5514 mPendingBuffersMap.numPendingBufsAtFlush);
5515
5516 /* send the flush event to the backend */
5517 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5518 if (rc < 0) {
5519 LOGE("Error in flush: IOCTL failure");
5520 mFlushPerf = false;
5521 pthread_mutex_unlock(&mMutex);
5522 return -ENODEV;
5523 }
5524
5525 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5526 LOGD("No pending buffers in HAL, return flush");
5527 mFlushPerf = false;
5528 pthread_mutex_unlock(&mMutex);
5529 return rc;
5530 }
5531
5532 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005533 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005534 if (rc < 0) {
5535 LOGE("Error reading the real time clock, cannot use timed wait");
5536 } else {
5537 timeout.tv_sec += FLUSH_TIMEOUT;
5538 timed_wait = true;
5539 }
5540
5541 //Block on conditional variable
5542 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5543 LOGD("Waiting on mBuffersCond");
5544 if (!timed_wait) {
5545 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5546 if (rc != 0) {
5547 LOGE("pthread_cond_wait failed due to rc = %s",
5548 strerror(rc));
5549 break;
5550 }
5551 } else {
5552 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5553 if (rc != 0) {
5554 LOGE("pthread_cond_timedwait failed due to rc = %s",
5555 strerror(rc));
5556 break;
5557 }
5558 }
5559 }
5560 if (rc != 0) {
5561 mFlushPerf = false;
5562 pthread_mutex_unlock(&mMutex);
5563 return -ENODEV;
5564 }
5565
5566 LOGD("Received buffers, now safe to return them");
5567
5568 //make sure the channels handle flush
5569 //currently only required for the picture channel to release snapshot resources
5570 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5571 it != mStreamInfo.end(); it++) {
5572 QCamera3Channel *channel = (*it)->channel;
5573 if (channel) {
5574 rc = channel->flush();
5575 if (rc) {
5576 LOGE("Flushing the channels failed with error %d", rc);
5577 // even though the channel flush failed we need to continue and
5578 // return the buffers we have to the framework, however the return
5579 // value will be an error
5580 rc = -ENODEV;
5581 }
5582 }
5583 }
5584
5585 /* notify the frameworks and send errored results */
5586 rc = notifyErrorForPendingRequests();
5587 if (rc < 0) {
5588 LOGE("notifyErrorForPendingRequests failed");
5589 pthread_mutex_unlock(&mMutex);
5590 return rc;
5591 }
5592
5593 //unblock process_capture_request
5594 mPendingLiveRequest = 0;
5595 unblockRequestIfNecessary();
5596
5597 mFlushPerf = false;
5598 pthread_mutex_unlock(&mMutex);
5599 LOGD ("Flush Operation complete. rc = %d", rc);
5600 return rc;
5601}
5602
5603/*===========================================================================
5604 * FUNCTION : handleCameraDeviceError
5605 *
5606 * DESCRIPTION: This function calls internal flush and notifies the error to
5607 * framework and updates the state variable.
5608 *
5609 * PARAMETERS : None
5610 *
5611 * RETURN : NO_ERROR on Success
5612 * Error code on failure
5613 *==========================================================================*/
5614int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5615{
5616 int32_t rc = NO_ERROR;
5617
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005618 {
5619 Mutex::Autolock lock(mFlushLock);
5620 pthread_mutex_lock(&mMutex);
5621 if (mState != ERROR) {
5622 //if mState != ERROR, nothing to be done
5623 pthread_mutex_unlock(&mMutex);
5624 return NO_ERROR;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005627
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005628 rc = flush(false /* restart channels */);
5629 if (NO_ERROR != rc) {
5630 LOGE("internal flush to handle mState = ERROR failed");
5631 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005632
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005633 pthread_mutex_lock(&mMutex);
5634 mState = DEINIT;
5635 pthread_mutex_unlock(&mMutex);
5636 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005637
5638 camera3_notify_msg_t notify_msg;
5639 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5640 notify_msg.type = CAMERA3_MSG_ERROR;
5641 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5642 notify_msg.message.error.error_stream = NULL;
5643 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005644 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005645
5646 return rc;
5647}
5648
5649/*===========================================================================
5650 * FUNCTION : captureResultCb
5651 *
5652 * DESCRIPTION: Callback handler for all capture result
5653 * (streams, as well as metadata)
5654 *
5655 * PARAMETERS :
5656 * @metadata : metadata information
5657 * @buffer : actual gralloc buffer to be returned to frameworks.
5658 * NULL if metadata.
5659 *
5660 * RETURN : NONE
5661 *==========================================================================*/
5662void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5663 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5664{
5665 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005666 pthread_mutex_lock(&mMutex);
5667 uint8_t batchSize = mBatchSize;
5668 pthread_mutex_unlock(&mMutex);
5669 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005670 handleBatchMetadata(metadata_buf,
5671 true /* free_and_bufdone_meta_buf */);
5672 } else { /* mBatchSize = 0 */
5673 hdrPlusPerfLock(metadata_buf);
5674 pthread_mutex_lock(&mMutex);
5675 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005676 true /* free_and_bufdone_meta_buf */,
5677 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005678 pthread_mutex_unlock(&mMutex);
5679 }
5680 } else if (isInputBuffer) {
5681 pthread_mutex_lock(&mMutex);
5682 handleInputBufferWithLock(frame_number);
5683 pthread_mutex_unlock(&mMutex);
5684 } else {
5685 pthread_mutex_lock(&mMutex);
5686 handleBufferWithLock(buffer, frame_number);
5687 pthread_mutex_unlock(&mMutex);
5688 }
5689 return;
5690}
5691
5692/*===========================================================================
5693 * FUNCTION : getReprocessibleOutputStreamId
5694 *
5695 * DESCRIPTION: Get source output stream id for the input reprocess stream
5696 * based on size and format, which would be the largest
5697 * output stream if an input stream exists.
5698 *
5699 * PARAMETERS :
5700 * @id : return the stream id if found
5701 *
5702 * RETURN : int32_t type of status
5703 * NO_ERROR -- success
5704 * none-zero failure code
5705 *==========================================================================*/
5706int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5707{
5708    /* Check if there is any output or bidirectional stream with the same size
5709       and format, and return that stream */
5710 if ((mInputStreamInfo.dim.width > 0) &&
5711 (mInputStreamInfo.dim.height > 0)) {
5712 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5713 it != mStreamInfo.end(); it++) {
5714
5715 camera3_stream_t *stream = (*it)->stream;
5716 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5717 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5718 (stream->format == mInputStreamInfo.format)) {
5719 // Usage flag for an input stream and the source output stream
5720 // may be different.
5721 LOGD("Found reprocessible output stream! %p", *it);
5722                LOGD("input stream usage 0x%x, current stream usage 0x%x",
5723                        mInputStreamInfo.usage, stream->usage);
5724
5725 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5726 if (channel != NULL && channel->mStreams[0]) {
5727 id = channel->mStreams[0]->getMyServerID();
5728 return NO_ERROR;
5729 }
5730 }
5731 }
5732 } else {
5733 LOGD("No input stream, so no reprocessible output stream");
5734 }
5735 return NAME_NOT_FOUND;
5736}
5737
5738/*===========================================================================
5739 * FUNCTION : lookupFwkName
5740 *
5741 * DESCRIPTION: In case the enum is not the same in the framework and backend,
5742 *              make sure the parameter is correctly propagated
5743 *
5744 * PARAMETERS :
5745 * @arr : map between the two enums
5746 * @len : len of the map
5747 * @hal_name : name of the hal_parm to map
5748 *
5749 * RETURN : int type of status
5750 * fwk_name -- success
5751 *              NAME_NOT_FOUND -- failure
5752 *==========================================================================*/
5753template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5754 size_t len, halType hal_name)
5755{
5756
5757 for (size_t i = 0; i < len; i++) {
5758 if (arr[i].hal_name == hal_name) {
5759 return arr[i].fwk_name;
5760 }
5761 }
5762
5763    /* Not being able to find a matching framework type is not necessarily
5764     * an error case. This happens when mm-camera supports more attributes
5765     * than the framework does */
5766 LOGH("Cannot find matching framework type");
5767 return NAME_NOT_FOUND;
5768}
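/* Illustrative usage (mirrors how the translate routines below use this helper):
 *
 *     int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
 *     if (NAME_NOT_FOUND != val) {
 *         uint8_t fwk_flashMode = (uint8_t)val;
 *         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
 *     }
 */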
5769
5770/*===========================================================================
5771 * FUNCTION : lookupHalName
5772 *
5773 * DESCRIPTION: In case the enum is not the same in the framework and backend,
5774 *              make sure the parameter is correctly propagated
5775 *
5776 * PARAMETERS :
5777 * @arr : map between the two enums
5778 * @len : len of the map
5779 *   @fwk_name : name of the framework parameter to map
5780 *
5781 * RETURN : int32_t type of status
5782 * hal_name -- success
5783 * none-zero failure code
5784 *              NAME_NOT_FOUND -- failure
5785template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5786 size_t len, fwkType fwk_name)
5787{
5788 for (size_t i = 0; i < len; i++) {
5789 if (arr[i].fwk_name == fwk_name) {
5790 return arr[i].hal_name;
5791 }
5792 }
5793
5794 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5795 return NAME_NOT_FOUND;
5796}
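/* Illustrative usage sketch for the framework-to-HAL direction (variable names are
 * examples only):
 *
 *     uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
 *     int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
 *             fwk_effectMode);
 *     if (NAME_NOT_FOUND != val) {
 *         // program the backend parameter with the HAL enum held in val
 *     }
 */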
5797
5798/*===========================================================================
5799 * FUNCTION : lookupProp
5800 *
5801 * DESCRIPTION: lookup a value by its name
5802 *
5803 * PARAMETERS :
5804 * @arr : map between the two enums
5805 * @len : size of the map
5806 * @name : name to be looked up
5807 *
5808 * RETURN : Value if found
5809 * CAM_CDS_MODE_MAX if not found
5810 *==========================================================================*/
5811template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5812 size_t len, const char *name)
5813{
5814 if (name) {
5815 for (size_t i = 0; i < len; i++) {
5816 if (!strcmp(arr[i].desc, name)) {
5817 return arr[i].val;
5818 }
5819 }
5820 }
5821 return CAM_CDS_MODE_MAX;
5822}
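/* Illustrative usage sketch (property name shown only as an example), assuming the
 * CDS_MAP table defined elsewhere in this file:
 *
 *     char prop[PROPERTY_VALUE_MAX];
 *     memset(prop, 0, sizeof(prop));
 *     property_get("persist.camera.CDS", prop, "Auto");
 *     cam_cds_mode_type_t cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
 */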
5823
5824/*===========================================================================
5825 * FUNCTION   : translateFromHalMetadata
 *
5826 * DESCRIPTION: Translate the metadata reported by the HAL backend into the
 *              camera_metadata_t format expected by the framework
5827 *
5828 * PARAMETERS :
5829 * @metadata : metadata information from callback
5830 * @timestamp: metadata buffer timestamp
5831 * @request_id: request id
5832 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005833 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5834 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005835 * @pprocDone: whether internal offline postprocessing is done
 * @pipeline_depth: pipeline depth of this capture request
 * @capture_intent: capture intent of this capture request
 * @fwk_cacMode: color aberration mode requested by the framework
 * @firstMetadataInBatch: whether this is the first metadata of an HFR batch
5836 *
5837 * RETURN : camera_metadata_t*
5838 * metadata in a format specified by fwk
5839 *==========================================================================*/
5840camera_metadata_t*
5841QCamera3HardwareInterface::translateFromHalMetadata(
5842 metadata_buffer_t *metadata,
5843 nsecs_t timestamp,
5844 int32_t request_id,
5845 const CameraMetadata& jpegMetadata,
5846 uint8_t pipeline_depth,
5847 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005848 /* DevCamDebug metadata translateFromHalMetadata argument */
5849 uint8_t DevCamDebug_meta_enable,
5850 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005851 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005852 uint8_t fwk_cacMode,
5853 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005854{
5855 CameraMetadata camMetadata;
5856 camera_metadata_t *resultMetadata;
5857
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005858 if (mBatchSize && !firstMetadataInBatch) {
5859 /* In batch mode, use cached metadata from the first metadata
5860 in the batch */
5861 camMetadata.clear();
5862 camMetadata = mCachedMetadata;
5863 }
5864
Thierry Strudel3d639192016-09-09 11:52:26 -07005865 if (jpegMetadata.entryCount())
5866 camMetadata.append(jpegMetadata);
5867
5868 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5869 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5870 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5871 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005872 if (mBatchSize == 0) {
5873 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
5874 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
5875 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005876
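    // In HFR batch mode, only the per-frame tags above (timestamp, request id,
    // pipeline depth, capture intent) are refreshed; everything else is reused from
    // the cached metadata of the first frame in the batch, so return early here.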
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005877 if (mBatchSize && !firstMetadataInBatch) {
5878 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
5879 resultMetadata = camMetadata.release();
5880 return resultMetadata;
5881 }
5882
Samuel Ha68ba5172016-12-15 18:41:12 -08005883 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
5884    // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
5885 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
5886 // DevCamDebug metadata translateFromHalMetadata AF
5887 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
5888 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
5889 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
5890 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
5891 }
5892 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
5893 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
5894 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
5895 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
5896 }
5897 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
5898 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
5899 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
5900 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
5901 }
5902 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
5903 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
5904 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
5905 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
5906 }
5907 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
5908 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
5909 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
5910 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
5911 }
5912 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
5913 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
5914 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
5915 *DevCamDebug_af_monitor_pdaf_target_pos;
5916 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
5917 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
5918 }
5919 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
5920 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
5921 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
5922 *DevCamDebug_af_monitor_pdaf_confidence;
5923 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
5924 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
5925 }
5926 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
5927 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
5928 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
5929 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
5930 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
5931 }
5932 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
5933 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
5934 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
5935 *DevCamDebug_af_monitor_tof_target_pos;
5936 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
5937 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
5938 }
5939 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
5940 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
5941 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
5942 *DevCamDebug_af_monitor_tof_confidence;
5943 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
5944 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
5945 }
5946 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
5947 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
5948 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
5949 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
5950 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
5951 }
5952 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
5953 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
5954 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
5955 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
5956 &fwk_DevCamDebug_af_monitor_type_select, 1);
5957 }
5958 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
5959 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
5960 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
5961 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
5962 &fwk_DevCamDebug_af_monitor_refocus, 1);
5963 }
5964 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
5965 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
5966 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
5967 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
5968 &fwk_DevCamDebug_af_monitor_target_pos, 1);
5969 }
5970 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
5971 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
5972 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
5973 *DevCamDebug_af_search_pdaf_target_pos;
5974 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
5975 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
5976 }
5977 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
5978 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
5979 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
5980 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
5981 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
5982 }
5983 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
5984 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
5985 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
5986 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
5987 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
5988 }
5989 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
5990 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
5991 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
5992 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
5993 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
5994 }
5995 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
5996 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
5997 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
5998 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
5999 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6000 }
6001 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6002 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6003 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6004 *DevCamDebug_af_search_tof_target_pos;
6005 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6006 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6007 }
6008 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6009 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6010 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6011 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6012 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6013 }
6014 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6015 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6016 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6017 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6018 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6019 }
6020 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6021 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6022 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6023 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6024 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6025 }
6026 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6027 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6028 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6029 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6030 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6031 }
6032 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6033 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6034 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6035 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6036 &fwk_DevCamDebug_af_search_type_select, 1);
6037 }
6038 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6039 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6040 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6041 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6042 &fwk_DevCamDebug_af_search_next_pos, 1);
6043 }
6044 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6045 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6046 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6047 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6048 &fwk_DevCamDebug_af_search_target_pos, 1);
6049 }
6050 // DevCamDebug metadata translateFromHalMetadata AEC
6051 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6052 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6053 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6054 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6055 }
6056 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6057 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6058 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6059 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6060 }
6061 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6062 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6063 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6064 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6065 }
6066 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6067 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6068 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6069 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6070 }
6071 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6072 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6073 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6074 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6075 }
6076 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6077 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6078 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6079 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6080 }
6081 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6082 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6083 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6084 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6085 }
6086 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6087 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6088 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6089 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6090 }
6091 // DevCamDebug metadata translateFromHalMetadata AWB
6092 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6093 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6094 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6095 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6096 }
6097 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6098 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6099 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6100 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6101 }
6102 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6103 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6104 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6105 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6106 }
6107 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6108 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6109 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6110 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6111 }
6112 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6113 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6114 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6115 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6116 }
6117 }
6118 // atrace_end(ATRACE_TAG_ALWAYS);
6119
Thierry Strudel3d639192016-09-09 11:52:26 -07006120 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6121 int64_t fwk_frame_number = *frame_number;
6122 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6123 }
6124
6125 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6126 int32_t fps_range[2];
6127 fps_range[0] = (int32_t)float_range->min_fps;
6128 fps_range[1] = (int32_t)float_range->max_fps;
6129 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6130 fps_range, 2);
6131 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6132 fps_range[0], fps_range[1]);
6133 }
6134
6135 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6136 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6137 }
6138
6139 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6140        int val = lookupFwkName(SCENE_MODES_MAP,
6141 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6142 *sceneMode);
6143 if (NAME_NOT_FOUND != val) {
6144 uint8_t fwkSceneMode = (uint8_t)val;
6145 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6146 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6147 fwkSceneMode);
6148 }
6149 }
6150
6151 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6152 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6153 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6154 }
6155
6156 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6157 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6158 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6159 }
6160
6161 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6162 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6163 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6164 }
6165
6166 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6167 CAM_INTF_META_EDGE_MODE, metadata) {
6168 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6169 }
6170
6171 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6172 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6173 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6174 }
6175
6176 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6177 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6178 }
6179
6180 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6181 if (0 <= *flashState) {
6182 uint8_t fwk_flashState = (uint8_t) *flashState;
6183 if (!gCamCapability[mCameraId]->flash_available) {
6184 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6185 }
6186 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6187 }
6188 }
6189
6190 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6191 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6192 if (NAME_NOT_FOUND != val) {
6193 uint8_t fwk_flashMode = (uint8_t)val;
6194 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6195 }
6196 }
6197
6198 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6199 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6200 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6201 }
6202
6203 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6204 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6205 }
6206
6207 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6208 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6209 }
6210
6211 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6212 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6213 }
6214
6215 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6216 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6217 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6218 }
6219
6220 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6221 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6222 LOGD("fwk_videoStab = %d", fwk_videoStab);
6223 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6224 } else {
6225 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
6226        // Regardless of whether video stabilization is supported or not, CTS expects the
6227        // EIS result to be non-NULL, so hardcode the video stabilization result to OFF mode.
6228 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006229 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006230 }
6231
6232 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6233 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6234 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6235 }
6236
6237 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6238 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6239 }
6240
Thierry Strudel3d639192016-09-09 11:52:26 -07006241 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6242 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006243 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006244
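        // adjustBlackLevelForCFA() reorders the per-channel black levels from the
        // sensor's CFA ordering into the RGGB order reported to the framework.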
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006245 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6246 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006247
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006248 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006249 blackLevelAppliedPattern->cam_black_level[0],
6250 blackLevelAppliedPattern->cam_black_level[1],
6251 blackLevelAppliedPattern->cam_black_level[2],
6252 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006253 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6254 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006255
6256#ifndef USE_HAL_3_3
6257 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006258        // Need to convert the internal 12-bit depth to the sensor's 10-bit raw
6259        // depth space.
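        // Dividing by 4 = 2^(12-10); e.g. a 12-bit black level of 256 maps to 64 in
        // the 10-bit range.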
6260 fwk_blackLevelInd[0] /= 4.0;
6261 fwk_blackLevelInd[1] /= 4.0;
6262 fwk_blackLevelInd[2] /= 4.0;
6263 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006264 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6265 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006266#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006267 }
6268
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006269#ifndef USE_HAL_3_3
6270 // Fixed whitelevel is used by ISP/Sensor
6271 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6272 &gCamCapability[mCameraId]->white_level, 1);
6273#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006274
6275 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6276 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6277 int32_t scalerCropRegion[4];
6278 scalerCropRegion[0] = hScalerCropRegion->left;
6279 scalerCropRegion[1] = hScalerCropRegion->top;
6280 scalerCropRegion[2] = hScalerCropRegion->width;
6281 scalerCropRegion[3] = hScalerCropRegion->height;
6282
6283 // Adjust crop region from sensor output coordinate system to active
6284 // array coordinate system.
6285 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6286 scalerCropRegion[2], scalerCropRegion[3]);
6287
6288 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6289 }
6290
6291 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6292 LOGD("sensorExpTime = %lld", *sensorExpTime);
6293 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6294 }
6295
6296 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6297 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6298 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6299 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6300 }
6301
6302 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6303 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6304 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6305 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6306 sensorRollingShutterSkew, 1);
6307 }
6308
6309 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6310 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6311 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6312
6313 //calculate the noise profile based on sensitivity
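        // ANDROID_SENSOR_NOISE_PROFILE models the noise variance of a normalized pixel
        // value x as S * x + O; one (S, O) pair is reported per color channel.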
6314 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6315 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6316 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6317 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6318 noise_profile[i] = noise_profile_S;
6319 noise_profile[i+1] = noise_profile_O;
6320 }
6321 LOGD("noise model entry (S, O) is (%f, %f)",
6322 noise_profile_S, noise_profile_O);
6323 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6324 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6325 }
6326
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006327#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006328 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006329 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006330 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006331 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006332 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6333 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6334 }
6335 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006336#endif
6337
Thierry Strudel3d639192016-09-09 11:52:26 -07006338 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6339 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6340 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6341 }
6342
6343 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6344 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6345 *faceDetectMode);
6346 if (NAME_NOT_FOUND != val) {
6347 uint8_t fwk_faceDetectMode = (uint8_t)val;
6348 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6349
6350 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6351 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6352 CAM_INTF_META_FACE_DETECTION, metadata) {
6353 uint8_t numFaces = MIN(
6354 faceDetectionInfo->num_faces_detected, MAX_ROI);
6355 int32_t faceIds[MAX_ROI];
6356 uint8_t faceScores[MAX_ROI];
6357 int32_t faceRectangles[MAX_ROI * 4];
6358 int32_t faceLandmarks[MAX_ROI * 6];
6359 size_t j = 0, k = 0;
6360
6361 for (size_t i = 0; i < numFaces; i++) {
6362 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6363 // Adjust crop region from sensor output coordinate system to active
6364 // array coordinate system.
6365 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6366 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6367 rect.width, rect.height);
6368
6369 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6370 faceRectangles+j, -1);
6371
6372 j+= 4;
6373 }
6374 if (numFaces <= 0) {
6375 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6376 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6377 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6378 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6379 }
6380
6381 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6382 numFaces);
6383 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6384 faceRectangles, numFaces * 4U);
6385 if (fwk_faceDetectMode ==
6386 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6387 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6388 CAM_INTF_META_FACE_LANDMARK, metadata) {
6389
6390 for (size_t i = 0; i < numFaces; i++) {
6391 // Map the co-ordinate sensor output coordinate system to active
6392 // array coordinate system.
6393 mCropRegionMapper.toActiveArray(
6394 landmarks->face_landmarks[i].left_eye_center.x,
6395 landmarks->face_landmarks[i].left_eye_center.y);
6396 mCropRegionMapper.toActiveArray(
6397 landmarks->face_landmarks[i].right_eye_center.x,
6398 landmarks->face_landmarks[i].right_eye_center.y);
6399 mCropRegionMapper.toActiveArray(
6400 landmarks->face_landmarks[i].mouth_center.x,
6401 landmarks->face_landmarks[i].mouth_center.y);
6402
6403 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006404 k+= TOTAL_LANDMARK_INDICES;
6405 }
6406 } else {
6407 for (size_t i = 0; i < numFaces; i++) {
6408 setInvalidLandmarks(faceLandmarks+k);
6409 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006410 }
6411 }
6412
6413 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6414 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6415 faceLandmarks, numFaces * 6U);
6416 }
6417 }
6418 }
6419 }
6420 }
6421
6422 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6423 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6424 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006425
6426 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6427 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6428 // process histogram statistics info
6429 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6430 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6431 cam_histogram_data_t rHistData, gHistData, bHistData;
6432 memset(&rHistData, 0, sizeof(rHistData));
6433 memset(&gHistData, 0, sizeof(gHistData));
6434 memset(&bHistData, 0, sizeof(bHistData));
6435
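            // When the backend reports a single Bayer channel, replicate its histogram
            // across all three framework channels; CAM_STATS_CHANNEL_ALL supplies
            // separate R and B data, with the Gb stats used for the green channel.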
6436 switch (stats_data->type) {
6437 case CAM_HISTOGRAM_TYPE_BAYER:
6438 switch (stats_data->bayer_stats.data_type) {
6439 case CAM_STATS_CHANNEL_GR:
6440 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6441 break;
6442 case CAM_STATS_CHANNEL_GB:
6443 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6444 break;
6445 case CAM_STATS_CHANNEL_B:
6446 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6447 break;
6448 case CAM_STATS_CHANNEL_ALL:
6449 rHistData = stats_data->bayer_stats.r_stats;
6450 //Framework expects only 3 channels. So, for now,
6451 //use gb stats for G channel.
6452 gHistData = stats_data->bayer_stats.gb_stats;
6453 bHistData = stats_data->bayer_stats.b_stats;
6454 break;
6455 case CAM_STATS_CHANNEL_Y:
6456 case CAM_STATS_CHANNEL_R:
6457 default:
6458 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6459 break;
6460 }
6461 break;
6462 case CAM_HISTOGRAM_TYPE_YUV:
6463 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6464 break;
6465 }
6466
6467 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6468 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6469 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6470
6471 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6472 }
6473 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006474 }
6475
6476 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6477 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6478 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6479 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6480 }
6481
6482 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6483 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6484 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6485 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6486 }
6487
6488 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6489 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6490 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6491 CAM_MAX_SHADING_MAP_HEIGHT);
6492 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6493 CAM_MAX_SHADING_MAP_WIDTH);
6494 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6495 lensShadingMap->lens_shading, 4U * map_width * map_height);
6496 }
6497
6498 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6499 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6500 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6501 }
6502
6503 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6504 //Populate CAM_INTF_META_TONEMAP_CURVES
6505 /* ch0 = G, ch 1 = B, ch 2 = R*/
6506 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6507 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6508 tonemap->tonemap_points_cnt,
6509 CAM_MAX_TONEMAP_CURVE_SIZE);
6510 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6511 }
6512
6513 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6514 &tonemap->curves[0].tonemap_points[0][0],
6515 tonemap->tonemap_points_cnt * 2);
6516
6517 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6518 &tonemap->curves[1].tonemap_points[0][0],
6519 tonemap->tonemap_points_cnt * 2);
6520
6521 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6522 &tonemap->curves[2].tonemap_points[0][0],
6523 tonemap->tonemap_points_cnt * 2);
6524 }
6525
6526 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6527 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6528 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6529 CC_GAIN_MAX);
6530 }
6531
6532 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6533 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6534 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6535 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6536 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6537 }
6538
6539 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6540 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6541 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6542 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6543 toneCurve->tonemap_points_cnt,
6544 CAM_MAX_TONEMAP_CURVE_SIZE);
6545 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6546 }
6547 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6548 (float*)toneCurve->curve.tonemap_points,
6549 toneCurve->tonemap_points_cnt * 2);
6550 }
6551
6552 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6553 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6554 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6555 predColorCorrectionGains->gains, 4);
6556 }
6557
6558 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6559 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6560 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6561 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6562 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6563 }
6564
6565 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6566 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6567 }
6568
6569 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6570 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6571 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6572 }
6573
6574 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6575 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6576 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6577 }
6578
6579 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6580 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6581 *effectMode);
6582 if (NAME_NOT_FOUND != val) {
6583 uint8_t fwk_effectMode = (uint8_t)val;
6584 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6585 }
6586 }
6587
6588 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6589 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6590 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6591 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6592 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6593 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6594 }
6595 int32_t fwk_testPatternData[4];
6596 fwk_testPatternData[0] = testPatternData->r;
6597 fwk_testPatternData[3] = testPatternData->b;
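        // Slots 1 and 2 hold the two green channels; map the HAL Gr/Gb values into
        // them according to the sensor's CFA arrangement.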
6598 switch (gCamCapability[mCameraId]->color_arrangement) {
6599 case CAM_FILTER_ARRANGEMENT_RGGB:
6600 case CAM_FILTER_ARRANGEMENT_GRBG:
6601 fwk_testPatternData[1] = testPatternData->gr;
6602 fwk_testPatternData[2] = testPatternData->gb;
6603 break;
6604 case CAM_FILTER_ARRANGEMENT_GBRG:
6605 case CAM_FILTER_ARRANGEMENT_BGGR:
6606 fwk_testPatternData[2] = testPatternData->gr;
6607 fwk_testPatternData[1] = testPatternData->gb;
6608 break;
6609 default:
6610 LOGE("color arrangement %d is not supported",
6611 gCamCapability[mCameraId]->color_arrangement);
6612 break;
6613 }
6614 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6615 }
6616
6617 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6618 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6619 }
6620
6621 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6622 String8 str((const char *)gps_methods);
6623 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6624 }
6625
6626 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6627 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6628 }
6629
6630 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6631 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6632 }
6633
6634 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6635 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6636 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6637 }
6638
6639 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6640 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6641 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6642 }
6643
6644 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6645 int32_t fwk_thumb_size[2];
6646 fwk_thumb_size[0] = thumb_size->width;
6647 fwk_thumb_size[1] = thumb_size->height;
6648 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6649 }
6650
6651 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6652 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6653 privateData,
6654 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6655 }
6656
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006657 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6658 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6659 meteringMode, 1);
6660 }
6661
Thierry Strudel3d639192016-09-09 11:52:26 -07006662 if (metadata->is_tuning_params_valid) {
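        // Serialize the tuning blob as: data version, then the sensor/VFE/CPP/CAC/mod3
        // section sizes (one uint32_t each), followed by the sensor, VFE, CPP and CAC
        // payloads, each clamped to its TUNING_*_MAX limit.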
6663 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6664 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6665 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6666
6667
6668 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6669 sizeof(uint32_t));
6670 data += sizeof(uint32_t);
6671
6672 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6673 sizeof(uint32_t));
6674 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6675 data += sizeof(uint32_t);
6676
6677 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6678 sizeof(uint32_t));
6679 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6680 data += sizeof(uint32_t);
6681
6682 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6683 sizeof(uint32_t));
6684 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6685 data += sizeof(uint32_t);
6686
6687 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6688 sizeof(uint32_t));
6689 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6690 data += sizeof(uint32_t);
6691
6692 metadata->tuning_params.tuning_mod3_data_size = 0;
6693 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6694 sizeof(uint32_t));
6695 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6696 data += sizeof(uint32_t);
6697
6698 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6699 TUNING_SENSOR_DATA_MAX);
6700 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6701 count);
6702 data += count;
6703
6704 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6705 TUNING_VFE_DATA_MAX);
6706 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6707 count);
6708 data += count;
6709
6710 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6711 TUNING_CPP_DATA_MAX);
6712 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6713 count);
6714 data += count;
6715
6716 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6717 TUNING_CAC_DATA_MAX);
6718 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6719 count);
6720 data += count;
6721
6722 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6723 (int32_t *)(void *)tuning_meta_data_blob,
6724 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6725 }
6726
6727 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6728 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6729 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6730 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6731 NEUTRAL_COL_POINTS);
6732 }
6733
6734 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6735 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6736 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6737 }
6738
6739 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6740 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6741 // Adjust crop region from sensor output coordinate system to active
6742 // array coordinate system.
6743 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6744 hAeRegions->rect.width, hAeRegions->rect.height);
6745
6746 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6747 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6748 REGIONS_TUPLE_COUNT);
6749 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6750 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6751 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6752 hAeRegions->rect.height);
6753 }
6754
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006755 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6756 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6757 if (NAME_NOT_FOUND != val) {
6758 uint8_t fwkAfMode = (uint8_t)val;
6759 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6760 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6761 } else {
6762 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6763 val);
6764 }
6765 }
6766
Thierry Strudel3d639192016-09-09 11:52:26 -07006767 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6768 uint8_t fwk_afState = (uint8_t) *afState;
6769 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006770 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006771 }
6772
6773 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6774 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6775 }
6776
6777 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6778 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6779 }
6780
6781 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6782 uint8_t fwk_lensState = *lensState;
6783 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6784 }
6785
6786 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6787 /*af regions*/
6788 int32_t afRegions[REGIONS_TUPLE_COUNT];
6789 // Adjust crop region from sensor output coordinate system to active
6790 // array coordinate system.
6791 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6792 hAfRegions->rect.width, hAfRegions->rect.height);
6793
6794 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6795 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6796 REGIONS_TUPLE_COUNT);
6797 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6798 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6799 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6800 hAfRegions->rect.height);
6801 }
6802
6803 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006804 uint32_t ab_mode = *hal_ab_mode;
6805 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6806 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6807 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6808 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006809 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006810 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006811 if (NAME_NOT_FOUND != val) {
6812 uint8_t fwk_ab_mode = (uint8_t)val;
6813 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6814 }
6815 }
6816
6817 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6818 int val = lookupFwkName(SCENE_MODES_MAP,
6819 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6820 if (NAME_NOT_FOUND != val) {
6821 uint8_t fwkBestshotMode = (uint8_t)val;
6822 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6823 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6824 } else {
6825 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6826 }
6827 }
6828
6829 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6830 uint8_t fwk_mode = (uint8_t) *mode;
6831 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6832 }
6833
6834    /* Constant metadata values to be updated */
6835 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6836 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6837
6838 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6839 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6840
6841 int32_t hotPixelMap[2];
6842 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6843
6844 // CDS
6845 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6846 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6847 }
6848
Thierry Strudel04e026f2016-10-10 11:27:36 -07006849 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6850 int32_t fwk_hdr;
6851 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6852 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6853 } else {
6854 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6855 }
6856 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6857 }
6858
6859 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006860 int32_t fwk_ir = (int32_t) *ir;
6861 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006862 }
6863
Thierry Strudel269c81a2016-10-12 12:13:59 -07006864 // AEC SPEED
6865 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6866 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6867 }
6868
6869 // AWB SPEED
6870 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6871 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6872 }
6873
Thierry Strudel3d639192016-09-09 11:52:26 -07006874 // TNR
6875 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
6876 uint8_t tnr_enable = tnr->denoise_enable;
6877 int32_t tnr_process_type = (int32_t)tnr->process_plates;
6878
6879 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
6880 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
6881 }
6882
6883 // Reprocess crop data
6884 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
6885 uint8_t cnt = crop_data->num_of_streams;
6886 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
6887 // mm-qcamera-daemon only posts crop_data for streams
6888            // not linked to pproc, so the absence of valid crop metadata is not
6889            // necessarily an error case.
6890 LOGD("No valid crop metadata entries");
6891 } else {
6892 uint32_t reproc_stream_id;
6893 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6894 LOGD("No reprocessible stream found, ignore crop data");
6895 } else {
6896 int rc = NO_ERROR;
6897 Vector<int32_t> roi_map;
6898 int32_t *crop = new int32_t[cnt*4];
6899 if (NULL == crop) {
6900 rc = NO_MEMORY;
6901 }
6902 if (NO_ERROR == rc) {
6903 int32_t streams_found = 0;
6904 for (size_t i = 0; i < cnt; i++) {
6905 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
6906 if (pprocDone) {
6907 // HAL already does internal reprocessing,
6908 // either via reprocessing before JPEG encoding,
6909 // or offline postprocessing for pproc bypass case.
6910 crop[0] = 0;
6911 crop[1] = 0;
6912 crop[2] = mInputStreamInfo.dim.width;
6913 crop[3] = mInputStreamInfo.dim.height;
6914 } else {
6915 crop[0] = crop_data->crop_info[i].crop.left;
6916 crop[1] = crop_data->crop_info[i].crop.top;
6917 crop[2] = crop_data->crop_info[i].crop.width;
6918 crop[3] = crop_data->crop_info[i].crop.height;
6919 }
6920 roi_map.add(crop_data->crop_info[i].roi_map.left);
6921 roi_map.add(crop_data->crop_info[i].roi_map.top);
6922 roi_map.add(crop_data->crop_info[i].roi_map.width);
6923 roi_map.add(crop_data->crop_info[i].roi_map.height);
6924 streams_found++;
6925 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
6926 crop[0], crop[1], crop[2], crop[3]);
6927 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
6928 crop_data->crop_info[i].roi_map.left,
6929 crop_data->crop_info[i].roi_map.top,
6930 crop_data->crop_info[i].roi_map.width,
6931 crop_data->crop_info[i].roi_map.height);
6932 break;
6933
6934 }
6935 }
6936 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
6937 &streams_found, 1);
6938 camMetadata.update(QCAMERA3_CROP_REPROCESS,
6939 crop, (size_t)(streams_found * 4));
6940 if (roi_map.array()) {
6941 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
6942 roi_map.array(), roi_map.size());
6943 }
6944 }
6945 if (crop) {
6946 delete [] crop;
6947 }
6948 }
6949 }
6950 }
6951
6952 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
6953        // Regardless of whether CAC is supported or not, CTS expects the CAC result to be
6954        // non-NULL, so hardcode the CAC result to OFF mode.
6955 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
6956 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
6957 } else {
6958 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
6959 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
6960 *cacMode);
6961 if (NAME_NOT_FOUND != val) {
6962 uint8_t resultCacMode = (uint8_t)val;
6963                // Check whether the CAC result from the callback equals the framework-set CAC mode.
6964                // If not, report the CAC mode that came in the corresponding request.
6965 if (fwk_cacMode != resultCacMode) {
6966 resultCacMode = fwk_cacMode;
6967 }
6968 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
6969 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
6970 } else {
6971 LOGE("Invalid CAC camera parameter: %d", *cacMode);
6972 }
6973 }
6974 }
6975
6976 // Post blob of cam_cds_data through vendor tag.
6977 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
6978 uint8_t cnt = cdsInfo->num_of_streams;
6979 cam_cds_data_t cdsDataOverride;
6980 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
6981 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
6982 cdsDataOverride.num_of_streams = 1;
6983 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
6984 uint32_t reproc_stream_id;
6985 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
6986 LOGD("No reprocessible stream found, ignore cds data");
6987 } else {
6988 for (size_t i = 0; i < cnt; i++) {
6989 if (cdsInfo->cds_info[i].stream_id ==
6990 reproc_stream_id) {
6991 cdsDataOverride.cds_info[0].cds_enable =
6992 cdsInfo->cds_info[i].cds_enable;
6993 break;
6994 }
6995 }
6996 }
6997 } else {
6998 LOGD("Invalid stream count %d in CDS_DATA", cnt);
6999 }
7000 camMetadata.update(QCAMERA3_CDS_INFO,
7001 (uint8_t *)&cdsDataOverride,
7002 sizeof(cam_cds_data_t));
7003 }
7004
7005 // Ldaf calibration data
7006 if (!mLdafCalibExist) {
7007 IF_META_AVAILABLE(uint32_t, ldafCalib,
7008 CAM_INTF_META_LDAF_EXIF, metadata) {
7009 mLdafCalibExist = true;
7010 mLdafCalib[0] = ldafCalib[0];
7011 mLdafCalib[1] = ldafCalib[1];
7012 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7013 ldafCalib[0], ldafCalib[1]);
7014 }
7015 }
7016
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007017 // Reprocess and DDM debug data through vendor tag
7018 cam_reprocess_info_t repro_info;
7019 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007020 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7021 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007022 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007023 }
7024 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7025 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007026 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007027 }
7028 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7029 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007030 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007031 }
7032 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7033 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007034 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007035 }
7036 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7037 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007038 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007039 }
7040 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007041 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007042 }
7043 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7044 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007045 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007046 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007047 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7048 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7049 }
7050 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7051 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7052 }
7053 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7054 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007055
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007056 // INSTANT AEC MODE
7057 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7058 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7059 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7060 }
7061
Shuzhen Wange763e802016-03-31 10:24:29 -07007062 // AF scene change
7063 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7064 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7065 }
7066
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007067 /* In batch mode, cache the first metadata in the batch */
7068 if (mBatchSize && firstMetadataInBatch) {
7069 mCachedMetadata.clear();
7070 mCachedMetadata = camMetadata;
7071 }
7072
Thierry Strudel3d639192016-09-09 11:52:26 -07007073 resultMetadata = camMetadata.release();
7074 return resultMetadata;
7075}
7076
7077/*===========================================================================
7078 * FUNCTION : saveExifParams
7079 *
7080 * DESCRIPTION: Cache 3A and stats EXIF debug parameters from the metadata callback
7081 *
7082 * PARAMETERS :
7083 * @metadata : metadata information from callback
7084 *
7085 * RETURN : none
7086 *
7087 *==========================================================================*/
7088void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7089{
7090 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7091 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7092 if (mExifParams.debug_params) {
7093 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7094 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7095 }
7096 }
7097 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7098 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7099 if (mExifParams.debug_params) {
7100 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7101 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7102 }
7103 }
7104 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7105 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7106 if (mExifParams.debug_params) {
7107 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7108 mExifParams.debug_params->af_debug_params_valid = TRUE;
7109 }
7110 }
7111 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7112 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7113 if (mExifParams.debug_params) {
7114 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7115 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7116 }
7117 }
7118 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7119 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7120 if (mExifParams.debug_params) {
7121 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7122 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7123 }
7124 }
7125 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7126 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7127 if (mExifParams.debug_params) {
7128 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7129 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7130 }
7131 }
7132 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7133 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7134 if (mExifParams.debug_params) {
7135 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7136 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7137 }
7138 }
7139 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7140 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7141 if (mExifParams.debug_params) {
7142 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7143 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7144 }
7145 }
7146}
7147
7148/*===========================================================================
7149 * FUNCTION : get3AExifParams
7150 *
7151 * DESCRIPTION: Return the cached EXIF parameters (including the 3A debug data
7151 *              saved by saveExifParams)
7152 *
7153 * PARAMETERS : none
7154 *
7155 *
7156 * RETURN : mm_jpeg_exif_params_t
7157 *
7158 *==========================================================================*/
7159mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7160{
7161 return mExifParams;
7162}
7163
7164/*===========================================================================
7165 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7166 *
7167 * DESCRIPTION: Translate the urgent (partial-result) metadata received from the
7167 *              backend callback into framework result metadata
7168 *
7169 * PARAMETERS :
7170 * @metadata : metadata information from callback
7171 *
7172 * RETURN : camera_metadata_t*
7173 * metadata in a format specified by fwk
7174 *==========================================================================*/
7175camera_metadata_t*
7176QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7177 (metadata_buffer_t *metadata)
7178{
7179 CameraMetadata camMetadata;
7180 camera_metadata_t *resultMetadata;
7181
7182
7183 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7184 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7185 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7186 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7187 }
7188
7189 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7190 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7191 &aecTrigger->trigger, 1);
7192 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7193 &aecTrigger->trigger_id, 1);
7194 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7195 aecTrigger->trigger);
7196 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7197 aecTrigger->trigger_id);
7198 }
7199
7200 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7201 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7202 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7203 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7204 }
7205
Thierry Strudel3d639192016-09-09 11:52:26 -07007206 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7207 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7208 &af_trigger->trigger, 1);
7209 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7210 af_trigger->trigger);
7211 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7212 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7213 af_trigger->trigger_id);
7214 }
7215
7216 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
7217 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7218 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
7219 if (NAME_NOT_FOUND != val) {
7220 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
7221 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
7222 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
7223 } else {
7224 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
7225 }
7226 }
7227
7228 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7229 uint32_t aeMode = CAM_AE_MODE_MAX;
7230 int32_t flashMode = CAM_FLASH_MODE_MAX;
7231 int32_t redeye = -1;
7232 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
7233 aeMode = *pAeMode;
7234 }
7235 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
7236 flashMode = *pFlashMode;
7237 }
7238 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
7239 redeye = *pRedeye;
7240 }
7241
7242 if (1 == redeye) {
7243 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
7244 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7245 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
7246 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
7247 flashMode);
7248 if (NAME_NOT_FOUND != val) {
7249 fwk_aeMode = (uint8_t)val;
7250 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7251 } else {
7252 LOGE("Unsupported flash mode %d", flashMode);
7253 }
7254 } else if (aeMode == CAM_AE_MODE_ON) {
7255 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
7256 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7257 } else if (aeMode == CAM_AE_MODE_OFF) {
7258 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
7259 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
7260 } else {
7261 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
7262 "flashMode:%d, aeMode:%u!!!",
7263 redeye, flashMode, aeMode);
7264 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007265 if (mInstantAEC) {
7266        // Increment frame index count until a bound is reached for instant AEC.
7267 mInstantAecFrameIdxCount++;
7268 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
7269 CAM_INTF_META_AEC_INFO, metadata) {
7270 LOGH("ae_params->settled = %d",ae_params->settled);
7271 // If AEC settled, or if number of frames reached bound value,
7272 // should reset instant AEC.
7273 if (ae_params->settled ||
7274 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
7275 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
7276 mInstantAEC = false;
7277 mResetInstantAEC = true;
7278 mInstantAecFrameIdxCount = 0;
7279 }
7280 }
7281 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007282 resultMetadata = camMetadata.release();
7283 return resultMetadata;
7284}
7285
7286/*===========================================================================
7287 * FUNCTION : dumpMetadataToFile
7288 *
7289 * DESCRIPTION: Dumps tuning metadata to file system
7290 *
7291 * PARAMETERS :
7292 * @meta : tuning metadata
7293 * @dumpFrameCount : current dump frame count
7294 * @enabled : Enable mask
7295 *
7296 *==========================================================================*/
7297void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7298 uint32_t &dumpFrameCount,
7299 bool enabled,
7300 const char *type,
7301 uint32_t frameNumber)
7302{
7303 //Some sanity checks
7304 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7305 LOGE("Tuning sensor data size bigger than expected %d: %d",
7306 meta.tuning_sensor_data_size,
7307 TUNING_SENSOR_DATA_MAX);
7308 return;
7309 }
7310
7311 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7312 LOGE("Tuning VFE data size bigger than expected %d: %d",
7313 meta.tuning_vfe_data_size,
7314 TUNING_VFE_DATA_MAX);
7315 return;
7316 }
7317
7318 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7319 LOGE("Tuning CPP data size bigger than expected %d: %d",
7320 meta.tuning_cpp_data_size,
7321 TUNING_CPP_DATA_MAX);
7322 return;
7323 }
7324
7325 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7326 LOGE("Tuning CAC data size bigger than expected %d: %d",
7327 meta.tuning_cac_data_size,
7328 TUNING_CAC_DATA_MAX);
7329 return;
7330 }
7331 //
7332
7333 if(enabled){
7334 char timeBuf[FILENAME_MAX];
7335 char buf[FILENAME_MAX];
7336 memset(buf, 0, sizeof(buf));
7337 memset(timeBuf, 0, sizeof(timeBuf));
7338 time_t current_time;
7339 struct tm * timeinfo;
7340 time (&current_time);
7341 timeinfo = localtime (&current_time);
7342 if (timeinfo != NULL) {
7343 strftime (timeBuf, sizeof(timeBuf),
7344 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7345 }
7346 String8 filePath(timeBuf);
7347 snprintf(buf,
7348 sizeof(buf),
7349 "%dm_%s_%d.bin",
7350 dumpFrameCount,
7351 type,
7352 frameNumber);
7353 filePath.append(buf);
7354 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7355 if (file_fd >= 0) {
7356 ssize_t written_len = 0;
7357 meta.tuning_data_version = TUNING_DATA_VERSION;
7358 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7359 written_len += write(file_fd, data, sizeof(uint32_t));
7360 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7361 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7362 written_len += write(file_fd, data, sizeof(uint32_t));
7363 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7364 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7365 written_len += write(file_fd, data, sizeof(uint32_t));
7366 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7367 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7368 written_len += write(file_fd, data, sizeof(uint32_t));
7369 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7370 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7371 written_len += write(file_fd, data, sizeof(uint32_t));
7372 meta.tuning_mod3_data_size = 0;
7373 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7374 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7375 written_len += write(file_fd, data, sizeof(uint32_t));
7376 size_t total_size = meta.tuning_sensor_data_size;
7377 data = (void *)((uint8_t *)&meta.data);
7378 written_len += write(file_fd, data, total_size);
7379 total_size = meta.tuning_vfe_data_size;
7380 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7381 written_len += write(file_fd, data, total_size);
7382 total_size = meta.tuning_cpp_data_size;
7383 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7384 written_len += write(file_fd, data, total_size);
7385 total_size = meta.tuning_cac_data_size;
7386 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7387 written_len += write(file_fd, data, total_size);
7388 close(file_fd);
7389        } else {
7390            LOGE("failed to open file for metadata dumping");
7391 }
7392 }
7393}
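
// Sketch of the resulting dump-file layout (comment only, mirroring the writes
// above; the file name pattern comes from the strftime/snprintf calls):
//
//   <QCAMERA_DUMP_FRM_LOCATION><YYYYmmddHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin
//     uint32_t tuning_data_version
//     uint32_t tuning_sensor_data_size
//     uint32_t tuning_vfe_data_size
//     uint32_t tuning_cpp_data_size
//     uint32_t tuning_cac_data_size
//     uint32_t tuning_mod3_data_size      (always written as 0)
//     sensor data   from meta.data[0]
//     VFE data      from meta.data[TUNING_VFE_DATA_OFFSET]
//     CPP data      from meta.data[TUNING_CPP_DATA_OFFSET]
//     CAC data      from meta.data[TUNING_CAC_DATA_OFFSET]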
7394
7395/*===========================================================================
7396 * FUNCTION : cleanAndSortStreamInfo
7397 *
7398 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7399 *              and sort them such that the raw stream is at the end of the list.
7400 *              This is a workaround for a camera daemon constraint.
7401 *
7402 * PARAMETERS : None
7403 *
7404 *==========================================================================*/
7405void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7406{
7407 List<stream_info_t *> newStreamInfo;
7408
7409 /*clean up invalid streams*/
7410 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7411 it != mStreamInfo.end();) {
7412 if(((*it)->status) == INVALID){
7413 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7414 delete channel;
7415 free(*it);
7416 it = mStreamInfo.erase(it);
7417 } else {
7418 it++;
7419 }
7420 }
7421
7422 // Move preview/video/callback/snapshot streams into newList
7423 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7424 it != mStreamInfo.end();) {
7425 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7426 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7427 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7428 newStreamInfo.push_back(*it);
7429 it = mStreamInfo.erase(it);
7430 } else
7431 it++;
7432 }
7433 // Move raw streams into newList
7434 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7435 it != mStreamInfo.end();) {
7436 newStreamInfo.push_back(*it);
7437 it = mStreamInfo.erase(it);
7438 }
7439
7440 mStreamInfo = newStreamInfo;
7441}
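
// Illustrative ordering (comment only; the stream mix is hypothetical): given
// mStreamInfo = [RAW16, preview, snapshot], the two passes above rebuild the
// list as [preview, snapshot, RAW16], i.e. processed streams first and raw
// stream(s) last, with INVALID entries already deleted and their channels freed.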
7442
7443/*===========================================================================
7444 * FUNCTION : extractJpegMetadata
7445 *
7446 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7447 *              JPEG metadata is cached in HAL, and returned as part of capture
7448 * result when metadata is returned from camera daemon.
7449 *
7450 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7451 * @request: capture request
7452 *
7453 *==========================================================================*/
7454void QCamera3HardwareInterface::extractJpegMetadata(
7455 CameraMetadata& jpegMetadata,
7456 const camera3_capture_request_t *request)
7457{
7458 CameraMetadata frame_settings;
7459 frame_settings = request->settings;
7460
7461 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7462 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7463 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7464 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7465
7466 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7467 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7468 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7469 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7470
7471 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7472 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7473 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7474 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7475
7476 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7477 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7478 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7479 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7480
7481 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7482 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7483 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7484 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7485
7486 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7487 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7488 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7489 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7490
7491 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7492 int32_t thumbnail_size[2];
7493 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7494 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7495 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7496 int32_t orientation =
7497 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007498 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007499 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7500 int32_t temp;
7501 temp = thumbnail_size[0];
7502 thumbnail_size[0] = thumbnail_size[1];
7503 thumbnail_size[1] = temp;
7504 }
7505 }
7506 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7507 thumbnail_size,
7508 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7509 }
7510
7511}
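
// Illustrative example (comment only; the request values are hypothetical): a
// request carrying ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240} and
// ANDROID_JPEG_ORIENTATION = 90, on a target where needJpegExifRotation()
// returns false (rotation is applied to the pixels rather than signalled via
// EXIF), is cached with a thumbnail size of {240, 320} so the thumbnail keeps
// the aspect ratio of the rotated main image.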
7512
7513/*===========================================================================
7514 * FUNCTION : convertToRegions
7515 *
7516 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7517 *
7518 * PARAMETERS :
7519 * @rect : cam_rect_t struct to convert
7520 * @region : int32_t destination array
7521 * @weight : if we are converting from cam_area_t, weight is valid
7522 * else weight = -1
7523 *
7524 *==========================================================================*/
7525void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7526 int32_t *region, int weight)
7527{
7528 region[0] = rect.left;
7529 region[1] = rect.top;
7530 region[2] = rect.left + rect.width;
7531 region[3] = rect.top + rect.height;
7532 if (weight > -1) {
7533 region[4] = weight;
7534 }
7535}
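
// Illustrative sketch (comment only; the numbers are hypothetical):
//
//     cam_rect_t rect = { 100, 200, 300, 400 };   // left, top, width, height
//     int32_t region[5];
//     convertToRegions(rect, region, 1 /*weight*/);
//     // region = { 100, 200, 400, 600, 1 }       // [xmin, ymin, xmax, ymax, weight]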
7536
7537/*===========================================================================
7538 * FUNCTION : convertFromRegions
7539 *
7540 * DESCRIPTION: helper method to convert a metadata region array into cam_area_t
7541 *
7542 * PARAMETERS :
7543 *   @roi      : cam_area_t destination to fill
7544 *   @settings : capture request settings containing the region tag
7545 *   @tag      : metadata tag holding the [xmin, ymin, xmax, ymax, weight] array
7547 *
7548 *==========================================================================*/
7549void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7550 const camera_metadata_t *settings, uint32_t tag)
7551{
7552 CameraMetadata frame_settings;
7553 frame_settings = settings;
7554 int32_t x_min = frame_settings.find(tag).data.i32[0];
7555 int32_t y_min = frame_settings.find(tag).data.i32[1];
7556 int32_t x_max = frame_settings.find(tag).data.i32[2];
7557 int32_t y_max = frame_settings.find(tag).data.i32[3];
7558 roi.weight = frame_settings.find(tag).data.i32[4];
7559 roi.rect.left = x_min;
7560 roi.rect.top = y_min;
7561 roi.rect.width = x_max - x_min;
7562 roi.rect.height = y_max - y_min;
7563}
7564
7565/*===========================================================================
7566 * FUNCTION : resetIfNeededROI
7567 *
7568 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7569 * crop region
7570 *
7571 * PARAMETERS :
7572 * @roi : cam_area_t struct to resize
7573 * @scalerCropRegion : cam_crop_region_t region to compare against
7574 *
7575 *
7576 *==========================================================================*/
7577bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7578 const cam_crop_region_t* scalerCropRegion)
7579{
7580 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7581 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7582 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7583 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7584
7585    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
7586     * Without this check, the validation below (whether the ROI lies inside the
7587     * scaler crop region) would fail, the ROI would not be reset, and the
7588     * algorithm would keep using a stale ROI window.
7589 */
7590 if (roi->weight == 0) {
7591 return true;
7592 }
7593
7594 if ((roi_x_max < scalerCropRegion->left) ||
7595 // right edge of roi window is left of scalar crop's left edge
7596 (roi_y_max < scalerCropRegion->top) ||
7597 // bottom edge of roi window is above scalar crop's top edge
7598 (roi->rect.left > crop_x_max) ||
7599            // left edge of roi window is beyond (to the right of) scalar crop's right edge
7600 (roi->rect.top > crop_y_max)){
7601            // top edge of roi window is beyond (below) scalar crop's bottom edge
7602 return false;
7603 }
7604 if (roi->rect.left < scalerCropRegion->left) {
7605 roi->rect.left = scalerCropRegion->left;
7606 }
7607 if (roi->rect.top < scalerCropRegion->top) {
7608 roi->rect.top = scalerCropRegion->top;
7609 }
7610 if (roi_x_max > crop_x_max) {
7611 roi_x_max = crop_x_max;
7612 }
7613 if (roi_y_max > crop_y_max) {
7614 roi_y_max = crop_y_max;
7615 }
7616 roi->rect.width = roi_x_max - roi->rect.left;
7617 roi->rect.height = roi_y_max - roi->rect.top;
7618 return true;
7619}
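
// Worked example (comment only; the numbers are hypothetical): with a scaler
// crop region of {0, 0, 1920, 1080} and an ROI of {left 1800, top 900,
// 400x400, weight 1}, none of the rejection conditions hold, so the ROI is
// clipped to {1800, 900, 120x180} and the function returns true. An ROI lying
// entirely outside the crop region returns false, and weight 0 returns true
// via the early exit above.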
7620
7621/*===========================================================================
7622 * FUNCTION : convertLandmarks
7623 *
7624 * DESCRIPTION: helper method to extract the landmarks from face detection info
7625 *
7626 * PARAMETERS :
7627 * @landmark_data : input landmark data to be converted
7628 * @landmarks : int32_t destination array
7629 *
7630 *
7631 *==========================================================================*/
7632void QCamera3HardwareInterface::convertLandmarks(
7633 cam_face_landmarks_info_t landmark_data,
7634 int32_t *landmarks)
7635{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007636 if (landmark_data.is_left_eye_valid) {
7637 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7638 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7639 } else {
7640 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7641 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7642 }
7643
7644 if (landmark_data.is_right_eye_valid) {
7645 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7646 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7647 } else {
7648 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7649 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7650 }
7651
7652 if (landmark_data.is_mouth_valid) {
7653 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7654 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7655 } else {
7656 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7657 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7658 }
7659}
7660
7661/*===========================================================================
7662 * FUNCTION : setInvalidLandmarks
7663 *
7664 * DESCRIPTION: helper method to set invalid landmarks
7665 *
7666 * PARAMETERS :
7667 * @landmarks : int32_t destination array
7668 *
7669 *
7670 *==========================================================================*/
7671void QCamera3HardwareInterface::setInvalidLandmarks(
7672 int32_t *landmarks)
7673{
7674 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7675 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7676 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7677 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7678 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7679 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007680}
7681
7682#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007683
7684/*===========================================================================
7685 * FUNCTION : getCapabilities
7686 *
7687 * DESCRIPTION: query camera capability from back-end
7688 *
7689 * PARAMETERS :
7690 * @ops : mm-interface ops structure
7691 * @cam_handle : camera handle for which we need capability
7692 *
7693 * RETURN : ptr type of capability structure
7694 * capability for success
7695 * NULL for failure
7696 *==========================================================================*/
7697cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
7698 uint32_t cam_handle)
7699{
7700 int rc = NO_ERROR;
7701 QCamera3HeapMemory *capabilityHeap = NULL;
7702 cam_capability_t *cap_ptr = NULL;
7703
7704 if (ops == NULL) {
7705 LOGE("Invalid arguments");
7706 return NULL;
7707 }
7708
7709 capabilityHeap = new QCamera3HeapMemory(1);
7710 if (capabilityHeap == NULL) {
7711 LOGE("creation of capabilityHeap failed");
7712 return NULL;
7713 }
7714
7715 /* Allocate memory for capability buffer */
7716 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
7717 if(rc != OK) {
7718        LOGE("No memory for capability");
7719 goto allocate_failed;
7720 }
7721
7722 /* Map memory for capability buffer */
7723 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
7724
7725 rc = ops->map_buf(cam_handle,
7726 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
7727 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
7728 if(rc < 0) {
7729 LOGE("failed to map capability buffer");
7730 rc = FAILED_TRANSACTION;
7731 goto map_failed;
7732 }
7733
7734 /* Query Capability */
7735 rc = ops->query_capability(cam_handle);
7736 if(rc < 0) {
7737 LOGE("failed to query capability");
7738 rc = FAILED_TRANSACTION;
7739 goto query_failed;
7740 }
7741
7742 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
7743 if (cap_ptr == NULL) {
7744 LOGE("out of memory");
7745 rc = NO_MEMORY;
7746 goto query_failed;
7747 }
7748
7749 memset(cap_ptr, 0, sizeof(cam_capability_t));
7750 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
7751
7752 int index;
7753 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
7754 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
7755 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
7756 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
7757 }
7758
7759query_failed:
7760 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
7761map_failed:
7762 capabilityHeap->deallocate();
7763allocate_failed:
7764 delete capabilityHeap;
7765
7766 if (rc != NO_ERROR) {
7767 return NULL;
7768 } else {
7769 return cap_ptr;
7770 }
7771}
7772
Thierry Strudel3d639192016-09-09 11:52:26 -07007773/*===========================================================================
7774 * FUNCTION : initCapabilities
7775 *
7776 * DESCRIPTION: initialize camera capabilities in static data struct
7777 *
7778 * PARAMETERS :
7779 * @cameraId : camera Id
7780 *
7781 * RETURN : int32_t type of status
7782 * NO_ERROR -- success
7783 * none-zero failure code
7784 *==========================================================================*/
7785int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7786{
7787 int rc = 0;
7788 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007789 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007790
7791 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7792 if (rc) {
7793 LOGE("camera_open failed. rc = %d", rc);
7794 goto open_failed;
7795 }
7796 if (!cameraHandle) {
7797 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7798 goto open_failed;
7799 }
7800
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007801 handle = get_main_camera_handle(cameraHandle->camera_handle);
7802 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7803 if (gCamCapability[cameraId] == NULL) {
7804 rc = FAILED_TRANSACTION;
7805 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007806 }
7807
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007808 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007809 if (is_dual_camera_by_idx(cameraId)) {
7810 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7811 gCamCapability[cameraId]->aux_cam_cap =
7812 getCapabilities(cameraHandle->ops, handle);
7813 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7814 rc = FAILED_TRANSACTION;
7815 free(gCamCapability[cameraId]);
7816 goto failed_op;
7817 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007818
7819 // Copy the main camera capability to main_cam_cap struct
7820 gCamCapability[cameraId]->main_cam_cap =
7821 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7822 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7823 LOGE("out of memory");
7824 rc = NO_MEMORY;
7825 goto failed_op;
7826 }
7827 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7828 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007829 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007830failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007831 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7832 cameraHandle = NULL;
7833open_failed:
7834 return rc;
7835}
7836
7837/*==========================================================================
7838 * FUNCTION : get3Aversion
7839 *
7840 * DESCRIPTION: get the Q3A S/W version
7841 *
7842 * PARAMETERS :
7843 * @sw_version: Reference of Q3A structure which will hold version info upon
7844 * return
7845 *
7846 * RETURN : None
7847 *
7848 *==========================================================================*/
7849void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7850{
7851 if(gCamCapability[mCameraId])
7852 sw_version = gCamCapability[mCameraId]->q3a_version;
7853 else
7854 LOGE("Capability structure NULL!");
7855}
7856
7857
7858/*===========================================================================
7859 * FUNCTION : initParameters
7860 *
7861 * DESCRIPTION: initialize camera parameters
7862 *
7863 * PARAMETERS :
7864 *
7865 * RETURN : int32_t type of status
7866 * NO_ERROR -- success
7867 * none-zero failure code
7868 *==========================================================================*/
7869int QCamera3HardwareInterface::initParameters()
7870{
7871 int rc = 0;
7872
7873 //Allocate Set Param Buffer
7874 mParamHeap = new QCamera3HeapMemory(1);
7875 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
7876 if(rc != OK) {
7877 rc = NO_MEMORY;
7878 LOGE("Failed to allocate SETPARM Heap memory");
7879 delete mParamHeap;
7880 mParamHeap = NULL;
7881 return rc;
7882 }
7883
7884 //Map memory for parameters buffer
7885 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
7886 CAM_MAPPING_BUF_TYPE_PARM_BUF,
7887 mParamHeap->getFd(0),
7888 sizeof(metadata_buffer_t),
7889 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
7890 if(rc < 0) {
7891 LOGE("failed to map SETPARM buffer");
7892 rc = FAILED_TRANSACTION;
7893 mParamHeap->deallocate();
7894 delete mParamHeap;
7895 mParamHeap = NULL;
7896 return rc;
7897 }
7898
7899 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
7900
7901 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
7902 return rc;
7903}
7904
7905/*===========================================================================
7906 * FUNCTION : deinitParameters
7907 *
7908 * DESCRIPTION: de-initialize camera parameters
7909 *
7910 * PARAMETERS :
7911 *
7912 * RETURN : NONE
7913 *==========================================================================*/
7914void QCamera3HardwareInterface::deinitParameters()
7915{
7916 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
7917 CAM_MAPPING_BUF_TYPE_PARM_BUF);
7918
7919 mParamHeap->deallocate();
7920 delete mParamHeap;
7921 mParamHeap = NULL;
7922
7923 mParameters = NULL;
7924
7925 free(mPrevParameters);
7926 mPrevParameters = NULL;
7927}
7928
7929/*===========================================================================
7930 * FUNCTION : calcMaxJpegSize
7931 *
7932 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
7933 *
7934 * PARAMETERS :
7935 *
7936 * RETURN : max_jpeg_size
7937 *==========================================================================*/
7938size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
7939{
7940 size_t max_jpeg_size = 0;
7941 size_t temp_width, temp_height;
7942 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
7943 MAX_SIZES_CNT);
7944 for (size_t i = 0; i < count; i++) {
7945 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
7946 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
7947 if (temp_width * temp_height > max_jpeg_size ) {
7948 max_jpeg_size = temp_width * temp_height;
7949 }
7950 }
7951 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
7952 return max_jpeg_size;
7953}
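
// Sizing note (comment only; the picture size is hypothetical): the 3/2 factor
// budgets the JPEG buffer as if it held uncompressed YUV 4:2:0 for the largest
// picture size, plus room for the trailing camera3_jpeg_blob_t header. For a
// 4000x3000 maximum picture size this gives
// 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t) bytes, roughly 18 MB.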
7954
7955/*===========================================================================
7956 * FUNCTION : getMaxRawSize
7957 *
7958 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
7959 *
7960 * PARAMETERS :
7961 *
7962 * RETURN : Largest supported Raw Dimension
7963 *==========================================================================*/
7964cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
7965{
7966 int max_width = 0;
7967 cam_dimension_t maxRawSize;
7968
7969 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
7970 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
7971 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
7972 max_width = gCamCapability[camera_id]->raw_dim[i].width;
7973 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
7974 }
7975 }
7976 return maxRawSize;
7977}
7978
7979
7980/*===========================================================================
7981 * FUNCTION : calcMaxJpegDim
7982 *
7983 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
7984 *
7985 * PARAMETERS :
7986 *
7987 * RETURN : max_jpeg_dim
7988 *==========================================================================*/
7989cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
7990{
7991 cam_dimension_t max_jpeg_dim;
7992 cam_dimension_t curr_jpeg_dim;
7993 max_jpeg_dim.width = 0;
7994 max_jpeg_dim.height = 0;
7995 curr_jpeg_dim.width = 0;
7996 curr_jpeg_dim.height = 0;
7997 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
7998 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
7999 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8000 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8001 max_jpeg_dim.width * max_jpeg_dim.height ) {
8002 max_jpeg_dim.width = curr_jpeg_dim.width;
8003 max_jpeg_dim.height = curr_jpeg_dim.height;
8004 }
8005 }
8006 return max_jpeg_dim;
8007}
8008
8009/*===========================================================================
8010 * FUNCTION : addStreamConfig
8011 *
8012 * DESCRIPTION: adds the stream configuration to the array
8013 *
8014 * PARAMETERS :
8015 * @available_stream_configs : pointer to stream configuration array
8016 * @scalar_format : scalar format
8017 * @dim : configuration dimension
8018 * @config_type : input or output configuration type
8019 *
8020 * RETURN : NONE
8021 *==========================================================================*/
8022void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8023 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8024{
8025 available_stream_configs.add(scalar_format);
8026 available_stream_configs.add(dim.width);
8027 available_stream_configs.add(dim.height);
8028 available_stream_configs.add(config_type);
8029}
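
// Each call appends one (format, width, height, direction) quadruple; the flat
// vector is later published as ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
// Illustrative entry (comment only; the size is hypothetical):
//
//     { HAL_PIXEL_FORMAT_YCbCr_420_888, 1920, 1080,
//       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }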
8030
8031/*===========================================================================
8032 * FUNCTION : suppportBurstCapture
8033 *
8034 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8035 *
8036 * PARAMETERS :
8037 * @cameraId : camera Id
8038 *
8039 * RETURN : true if camera supports BURST_CAPTURE
8040 * false otherwise
8041 *==========================================================================*/
8042bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8043{
8044 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8045 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8046 const int32_t highResWidth = 3264;
8047 const int32_t highResHeight = 2448;
8048
8049 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8050 // Maximum resolution images cannot be captured at >= 10fps
8051 // -> not supporting BURST_CAPTURE
8052 return false;
8053 }
8054
8055 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8056 // Maximum resolution images can be captured at >= 20fps
8057 // --> supporting BURST_CAPTURE
8058 return true;
8059 }
8060
8061 // Find the smallest highRes resolution, or largest resolution if there is none
8062 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8063 MAX_SIZES_CNT);
8064 size_t highRes = 0;
8065 while ((highRes + 1 < totalCnt) &&
8066 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8067 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8068 highResWidth * highResHeight)) {
8069 highRes++;
8070 }
8071 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8072 return true;
8073 } else {
8074 return false;
8075 }
8076}
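
// Decision sketch (comment only; the duration is hypothetical): with
// picture_min_duration[0] = 66.6 ms (~15 fps) the first two checks are
// inconclusive (<= 100 ms but > 50 ms), so the loop locates the smallest
// picture size that is still >= 3264x2448 (~8 MP), and BURST_CAPTURE is
// advertised only if that size can be captured with a minimum duration of
// <= 50 ms, i.e. at >= 20 fps.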
8077
8078/*===========================================================================
8079 * FUNCTION : initStaticMetadata
8080 *
8081 * DESCRIPTION: initialize the static metadata
8082 *
8083 * PARAMETERS :
8084 * @cameraId : camera Id
8085 *
8086 * RETURN : int32_t type of status
8087 * 0 -- success
8088 * non-zero failure code
8089 *==========================================================================*/
8090int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8091{
8092 int rc = 0;
8093 CameraMetadata staticInfo;
8094 size_t count = 0;
8095 bool limitedDevice = false;
8096 char prop[PROPERTY_VALUE_MAX];
8097 bool supportBurst = false;
8098
8099 supportBurst = supportBurstCapture(cameraId);
8100
8101 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8102 * guaranteed or if min fps of max resolution is less than 20 fps, its
8103     * guaranteed or if min fps of max resolution is less than 20 fps, it is
8104     * advertised as a limited device */
8105 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8106 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8107 !supportBurst;
8108
8109 uint8_t supportedHwLvl = limitedDevice ?
8110 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008111#ifndef USE_HAL_3_3
8112 // LEVEL_3 - This device will support level 3.
8113 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8114#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008115 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008116#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008117
8118 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8119 &supportedHwLvl, 1);
8120
8121 bool facingBack = false;
8122 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8123 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8124 facingBack = true;
8125 }
8126 /*HAL 3 only*/
8127 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8128 &gCamCapability[cameraId]->min_focus_distance, 1);
8129
8130 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8131 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8132
8133 /*should be using focal lengths but sensor doesn't provide that info now*/
8134 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8135 &gCamCapability[cameraId]->focal_length,
8136 1);
8137
8138 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8139 gCamCapability[cameraId]->apertures,
8140 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8141
8142 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8143 gCamCapability[cameraId]->filter_densities,
8144 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8145
8146
8147 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8148 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8149 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8150
8151 int32_t lens_shading_map_size[] = {
8152 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8153 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8154 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8155 lens_shading_map_size,
8156 sizeof(lens_shading_map_size)/sizeof(int32_t));
8157
8158 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8159 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8160
8161 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8162 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8163
8164 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8165 &gCamCapability[cameraId]->max_frame_duration, 1);
8166
8167 camera_metadata_rational baseGainFactor = {
8168 gCamCapability[cameraId]->base_gain_factor.numerator,
8169 gCamCapability[cameraId]->base_gain_factor.denominator};
8170 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8171 &baseGainFactor, 1);
8172
8173 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8174 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8175
8176 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8177 gCamCapability[cameraId]->pixel_array_size.height};
8178 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8179 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8180
8181 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8182 gCamCapability[cameraId]->active_array_size.top,
8183 gCamCapability[cameraId]->active_array_size.width,
8184 gCamCapability[cameraId]->active_array_size.height};
8185 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8186 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8187
8188 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8189 &gCamCapability[cameraId]->white_level, 1);
8190
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008191 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8192 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8193 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008194 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008195 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008196
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008197#ifndef USE_HAL_3_3
8198 bool hasBlackRegions = false;
8199 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8200 LOGW("black_region_count: %d is bounded to %d",
8201 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8202 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8203 }
8204 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8205 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8206 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8207 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8208 }
8209 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8210 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8211 hasBlackRegions = true;
8212 }
8213#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008214 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8215 &gCamCapability[cameraId]->flash_charge_duration, 1);
8216
8217 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8218 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8219
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008220 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8221 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8222 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008223 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8224 &timestampSource, 1);
8225
8226 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8227 &gCamCapability[cameraId]->histogram_size, 1);
8228
8229 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8230 &gCamCapability[cameraId]->max_histogram_count, 1);
8231
8232 int32_t sharpness_map_size[] = {
8233 gCamCapability[cameraId]->sharpness_map_size.width,
8234 gCamCapability[cameraId]->sharpness_map_size.height};
8235
8236 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8237 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8238
8239 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8240 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8241
8242 int32_t scalar_formats[] = {
8243 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8244 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8245 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8246 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8247 HAL_PIXEL_FORMAT_RAW10,
8248 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8249 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8250 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8251 scalar_formats,
8252 scalar_formats_count);
8253
8254 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8255 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8256 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8257 count, MAX_SIZES_CNT, available_processed_sizes);
8258 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8259 available_processed_sizes, count * 2);
8260
8261 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8262 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8263 makeTable(gCamCapability[cameraId]->raw_dim,
8264 count, MAX_SIZES_CNT, available_raw_sizes);
8265 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8266 available_raw_sizes, count * 2);
8267
8268 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8269 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8270 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8271 count, MAX_SIZES_CNT, available_fps_ranges);
8272 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8273 available_fps_ranges, count * 2);
8274
8275 camera_metadata_rational exposureCompensationStep = {
8276 gCamCapability[cameraId]->exp_compensation_step.numerator,
8277 gCamCapability[cameraId]->exp_compensation_step.denominator};
8278 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8279 &exposureCompensationStep, 1);
8280
8281 Vector<uint8_t> availableVstabModes;
8282 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8283 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008284 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008285 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008286 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008287 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008288 count = IS_TYPE_MAX;
8289 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8290 for (size_t i = 0; i < count; i++) {
8291 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8292 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8293 eisSupported = true;
8294 break;
8295 }
8296 }
8297 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008298 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8299 }
8300 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8301 availableVstabModes.array(), availableVstabModes.size());
8302
8303 /*HAL 1 and HAL 3 common*/
8304 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8305 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8306 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8307    float maxZoom = (float)maxZoomStep / (float)minZoomStep;
8308 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8309 &maxZoom, 1);
8310
8311 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8312 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8313
8314 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8315 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8316 max3aRegions[2] = 0; /* AF not supported */
8317 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8318 max3aRegions, 3);
8319
8320 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8321 memset(prop, 0, sizeof(prop));
8322 property_get("persist.camera.facedetect", prop, "1");
8323 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8324 LOGD("Support face detection mode: %d",
8325 supportedFaceDetectMode);
8326
8327 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008328 /* support mode should be OFF if max number of face is 0 */
8329 if (maxFaces <= 0) {
8330 supportedFaceDetectMode = 0;
8331 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008332 Vector<uint8_t> availableFaceDetectModes;
8333 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8334 if (supportedFaceDetectMode == 1) {
8335 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8336 } else if (supportedFaceDetectMode == 2) {
8337 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8338 } else if (supportedFaceDetectMode == 3) {
8339 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8340 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8341 } else {
8342 maxFaces = 0;
8343 }
8344 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8345 availableFaceDetectModes.array(),
8346 availableFaceDetectModes.size());
8347 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8348 (int32_t *)&maxFaces, 1);
8349
8350 int32_t exposureCompensationRange[] = {
8351 gCamCapability[cameraId]->exposure_compensation_min,
8352 gCamCapability[cameraId]->exposure_compensation_max};
8353 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8354 exposureCompensationRange,
8355 sizeof(exposureCompensationRange)/sizeof(int32_t));
8356
8357 uint8_t lensFacing = (facingBack) ?
8358 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8359 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8360
8361 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8362 available_thumbnail_sizes,
8363 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8364
8365     /* all supported sizes are combined into this tag */
8366 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8367 /*android.scaler.availableStreamConfigurations*/
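    // Each entry appended below is a (format, width, height, direction) tuple, where direction
    // is OUTPUT or INPUT as defined for android.scaler.availableStreamConfigurations.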
8368 Vector<int32_t> available_stream_configs;
8369 cam_dimension_t active_array_dim;
8370 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
8371 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
8372     /* Add input/output stream configurations for each scalar format */
8373 for (size_t j = 0; j < scalar_formats_count; j++) {
8374 switch (scalar_formats[j]) {
8375 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8376 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8377 case HAL_PIXEL_FORMAT_RAW10:
8378 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8379 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8380 addStreamConfig(available_stream_configs, scalar_formats[j],
8381 gCamCapability[cameraId]->raw_dim[i],
8382 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8383 }
8384 break;
8385 case HAL_PIXEL_FORMAT_BLOB:
8386 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8387 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8388 addStreamConfig(available_stream_configs, scalar_formats[j],
8389 gCamCapability[cameraId]->picture_sizes_tbl[i],
8390 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8391 }
8392 break;
8393 case HAL_PIXEL_FORMAT_YCbCr_420_888:
8394 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
8395 default:
8396 cam_dimension_t largest_picture_size;
8397 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
8398 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8399 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8400 addStreamConfig(available_stream_configs, scalar_formats[j],
8401 gCamCapability[cameraId]->picture_sizes_tbl[i],
8402 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
8403                 /* Keep track of the largest picture size */
8404 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
8405 >= largest_picture_size.width &&
8406 gCamCapability[cameraId]->picture_sizes_tbl[i].height
8407 >= largest_picture_size.height)
8408 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
8409 }
8410             /* For the two formats below we also support input streams for reprocessing; advertise those */
8411 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
8412 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
8413 addStreamConfig(available_stream_configs, scalar_formats[j],
8414 largest_picture_size,
8415 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
8416 }
8417 break;
8418 }
8419 }
8420
8421 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
8422 available_stream_configs.array(), available_stream_configs.size());
8423 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8424 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8425
8426 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8427 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8428
8429 /* android.scaler.availableMinFrameDurations */
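    // Entries are (format, width, height, minimum frame duration in ns) tuples,
    // one per supported size of each advertised format.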
8430 Vector<int64_t> available_min_durations;
8431 for (size_t j = 0; j < scalar_formats_count; j++) {
8432 switch (scalar_formats[j]) {
8433 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
8434 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
8435 case HAL_PIXEL_FORMAT_RAW10:
8436 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8437 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
8438 available_min_durations.add(scalar_formats[j]);
8439 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
8440 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
8441 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
8442 }
8443 break;
8444 default:
8445 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
8446 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
8447 available_min_durations.add(scalar_formats[j]);
8448 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
8449 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
8450 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
8451 }
8452 break;
8453 }
8454 }
8455 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
8456 available_min_durations.array(), available_min_durations.size());
8457
8458 Vector<int32_t> available_hfr_configs;
8459 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
8460 int32_t fps = 0;
8461 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
8462 case CAM_HFR_MODE_60FPS:
8463 fps = 60;
8464 break;
8465 case CAM_HFR_MODE_90FPS:
8466 fps = 90;
8467 break;
8468 case CAM_HFR_MODE_120FPS:
8469 fps = 120;
8470 break;
8471 case CAM_HFR_MODE_150FPS:
8472 fps = 150;
8473 break;
8474 case CAM_HFR_MODE_180FPS:
8475 fps = 180;
8476 break;
8477 case CAM_HFR_MODE_210FPS:
8478 fps = 210;
8479 break;
8480 case CAM_HFR_MODE_240FPS:
8481 fps = 240;
8482 break;
8483 case CAM_HFR_MODE_480FPS:
8484 fps = 480;
8485 break;
8486 case CAM_HFR_MODE_OFF:
8487 case CAM_HFR_MODE_MAX:
8488 default:
8489 break;
8490 }
8491
8492 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
8493 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
8494 /* For each HFR frame rate, need to advertise one variable fps range
8495 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
8496 * and [120, 120]. While camcorder preview alone is running [30, 120] is
8497 * set by the app. When video recording is started, [120, 120] is
8498 * set. This way sensor configuration does not change when recording
8499 * is started */
8500
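            // Worked example (assuming PREVIEW_FPS_FOR_HFR == 30 and an illustrative 1920x1080
            // HFR dimension): a 120 fps entry produces (1920, 1080, 30, 120, 4) and
            // (1920, 1080, 120, 120, 4), where 4 is the max batch size (120 / 30).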
8501 /* (width, height, fps_min, fps_max, batch_size_max) */
8502 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
8503 j < MAX_SIZES_CNT; j++) {
8504 available_hfr_configs.add(
8505 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8506 available_hfr_configs.add(
8507 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8508 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
8509 available_hfr_configs.add(fps);
8510 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8511
8512 /* (width, height, fps_min, fps_max, batch_size_max) */
8513 available_hfr_configs.add(
8514 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
8515 available_hfr_configs.add(
8516 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
8517 available_hfr_configs.add(fps);
8518 available_hfr_configs.add(fps);
8519 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
8520 }
8521 }
8522 }
8523 //Advertise HFR capability only if the property is set
8524 memset(prop, 0, sizeof(prop));
8525 property_get("persist.camera.hal3hfr.enable", prop, "1");
8526 uint8_t hfrEnable = (uint8_t)atoi(prop);
8527
8528 if(hfrEnable && available_hfr_configs.array()) {
8529 staticInfo.update(
8530 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
8531 available_hfr_configs.array(), available_hfr_configs.size());
8532 }
8533
8534 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
8535 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
8536 &max_jpeg_size, 1);
8537
8538 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
8539 size_t size = 0;
8540 count = CAM_EFFECT_MODE_MAX;
8541 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
8542 for (size_t i = 0; i < count; i++) {
8543 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
8544 gCamCapability[cameraId]->supported_effects[i]);
8545 if (NAME_NOT_FOUND != val) {
8546 avail_effects[size] = (uint8_t)val;
8547 size++;
8548 }
8549 }
8550 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
8551 avail_effects,
8552 size);
8553
8554 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8555 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8556 size_t supported_scene_modes_cnt = 0;
8557 count = CAM_SCENE_MODE_MAX;
8558 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8559 for (size_t i = 0; i < count; i++) {
8560 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8561 CAM_SCENE_MODE_OFF) {
8562 int val = lookupFwkName(SCENE_MODES_MAP,
8563 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8564 gCamCapability[cameraId]->supported_scene_modes[i]);
8565 if (NAME_NOT_FOUND != val) {
8566 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8567 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8568 supported_scene_modes_cnt++;
8569 }
8570 }
8571 }
8572 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8573 avail_scene_modes,
8574 supported_scene_modes_cnt);
8575
8576 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8577 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8578 supported_scene_modes_cnt,
8579 CAM_SCENE_MODE_MAX,
8580 scene_mode_overrides,
8581 supported_indexes,
8582 cameraId);
8583
8584 if (supported_scene_modes_cnt == 0) {
8585 supported_scene_modes_cnt = 1;
8586 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8587 }
8588
8589 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8590 scene_mode_overrides, supported_scene_modes_cnt * 3);
8591
8592 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
8593 ANDROID_CONTROL_MODE_AUTO,
8594 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
8595 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
8596 available_control_modes,
8597 3);
8598
8599 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
8600 size = 0;
8601 count = CAM_ANTIBANDING_MODE_MAX;
8602 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
8603 for (size_t i = 0; i < count; i++) {
8604 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
8605 gCamCapability[cameraId]->supported_antibandings[i]);
8606 if (NAME_NOT_FOUND != val) {
8607 avail_antibanding_modes[size] = (uint8_t)val;
8608 size++;
8609 }
8610
8611 }
8612 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
8613 avail_antibanding_modes,
8614 size);
8615
8616 uint8_t avail_abberation_modes[] = {
8617 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
8618 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
8619 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
8620 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
8621 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
8622 if (0 == count) {
8623         // If no aberration correction modes are available for a device, advertise only the OFF mode
8624 size = 1;
8625 } else {
8626         // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
8627         // so advertise all 3 modes if at least one mode is supported, as per the
8628         // new M requirement
8629 size = 3;
8630 }
8631 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
8632 avail_abberation_modes,
8633 size);
8634
8635 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
8636 size = 0;
8637 count = CAM_FOCUS_MODE_MAX;
8638 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
8639 for (size_t i = 0; i < count; i++) {
8640 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
8641 gCamCapability[cameraId]->supported_focus_modes[i]);
8642 if (NAME_NOT_FOUND != val) {
8643 avail_af_modes[size] = (uint8_t)val;
8644 size++;
8645 }
8646 }
8647 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
8648 avail_af_modes,
8649 size);
8650
8651 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
8652 size = 0;
8653 count = CAM_WB_MODE_MAX;
8654 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
8655 for (size_t i = 0; i < count; i++) {
8656 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8657 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
8658 gCamCapability[cameraId]->supported_white_balances[i]);
8659 if (NAME_NOT_FOUND != val) {
8660 avail_awb_modes[size] = (uint8_t)val;
8661 size++;
8662 }
8663 }
8664 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
8665 avail_awb_modes,
8666 size);
8667
8668 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
8669 count = CAM_FLASH_FIRING_LEVEL_MAX;
8670 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
8671 count);
8672 for (size_t i = 0; i < count; i++) {
8673 available_flash_levels[i] =
8674 gCamCapability[cameraId]->supported_firing_levels[i];
8675 }
8676 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
8677 available_flash_levels, count);
8678
8679 uint8_t flashAvailable;
8680 if (gCamCapability[cameraId]->flash_available)
8681 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
8682 else
8683 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
8684 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
8685 &flashAvailable, 1);
8686
8687 Vector<uint8_t> avail_ae_modes;
8688 count = CAM_AE_MODE_MAX;
8689 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
8690 for (size_t i = 0; i < count; i++) {
8691 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
8692 }
8693 if (flashAvailable) {
8694 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
8695 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
8696 }
8697 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
8698 avail_ae_modes.array(),
8699 avail_ae_modes.size());
8700
8701 int32_t sensitivity_range[2];
8702 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
8703 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
8704 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
8705 sensitivity_range,
8706 sizeof(sensitivity_range) / sizeof(int32_t));
8707
8708 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
8709 &gCamCapability[cameraId]->max_analog_sensitivity,
8710 1);
8711
8712 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
8713 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
8714 &sensor_orientation,
8715 1);
8716
8717 int32_t max_output_streams[] = {
8718 MAX_STALLING_STREAMS,
8719 MAX_PROCESSED_STREAMS,
8720 MAX_RAW_STREAMS};
8721 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
8722 max_output_streams,
8723 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
8724
8725 uint8_t avail_leds = 0;
8726 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
8727 &avail_leds, 0);
8728
8729 uint8_t focus_dist_calibrated;
8730 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
8731 gCamCapability[cameraId]->focus_dist_calibrated);
8732 if (NAME_NOT_FOUND != val) {
8733 focus_dist_calibrated = (uint8_t)val;
8734 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
8735 &focus_dist_calibrated, 1);
8736 }
8737
8738 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
8739 size = 0;
8740 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
8741 MAX_TEST_PATTERN_CNT);
8742 for (size_t i = 0; i < count; i++) {
8743 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
8744 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
8745 if (NAME_NOT_FOUND != testpatternMode) {
8746 avail_testpattern_modes[size] = testpatternMode;
8747 size++;
8748 }
8749 }
8750 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
8751 avail_testpattern_modes,
8752 size);
8753
8754 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
8755 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
8756 &max_pipeline_depth,
8757 1);
8758
8759 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
8760 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
8761 &partial_result_count,
8762 1);
8763
8764 int32_t max_stall_duration = MAX_REPROCESS_STALL;
8765 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
8766
8767 Vector<uint8_t> available_capabilities;
8768 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
8769 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
8770 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
8771 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
8772 if (supportBurst) {
8773 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
8774 }
8775 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
8776 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
8777 if (hfrEnable && available_hfr_configs.array()) {
8778 available_capabilities.add(
8779 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
8780 }
8781
8782 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
8783 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
8784 }
8785 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
8786 available_capabilities.array(),
8787 available_capabilities.size());
8788
8789     // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
8790     // The assumption is that all bayer cameras support MANUAL_SENSOR.
8791 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8792 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
8793
8794 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
8795 &aeLockAvailable, 1);
8796
8797     // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
8798     // BURST_CAPTURE. The assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
8799 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
8800 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
8801
8802 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
8803 &awbLockAvailable, 1);
8804
8805 int32_t max_input_streams = 1;
8806 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
8807 &max_input_streams,
8808 1);
8809
8810 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
8811 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
8812 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
8813 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
8814 HAL_PIXEL_FORMAT_YCbCr_420_888};
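    // Decoded, the map above advertises: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888}
    // and YCbCr_420_888 -> {BLOB, YCbCr_420_888} for reprocessing.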
8815 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
8816 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
8817
8818 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
8819 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
8820 &max_latency,
8821 1);
8822
8823#ifndef USE_HAL_3_3
8824 int32_t isp_sensitivity_range[2];
8825 isp_sensitivity_range[0] =
8826 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
8827 isp_sensitivity_range[1] =
8828 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
8829 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
8830 isp_sensitivity_range,
8831 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
8832#endif
8833
8834     uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
8835 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
8836 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
8837 available_hot_pixel_modes,
8838 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
8839
8840 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
8841 ANDROID_SHADING_MODE_FAST,
8842 ANDROID_SHADING_MODE_HIGH_QUALITY};
8843 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
8844 available_shading_modes,
8845 3);
8846
8847 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
8848 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
8849 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
8850 available_lens_shading_map_modes,
8851 2);
8852
8853 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
8854 ANDROID_EDGE_MODE_FAST,
8855 ANDROID_EDGE_MODE_HIGH_QUALITY,
8856 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
8857 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
8858 available_edge_modes,
8859 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
8860
8861 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
8862 ANDROID_NOISE_REDUCTION_MODE_FAST,
8863 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
8864 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
8865 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
8866 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
8867 available_noise_red_modes,
8868 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
8869
8870 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
8871 ANDROID_TONEMAP_MODE_FAST,
8872 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
8873 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
8874 available_tonemap_modes,
8875 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
8876
8877 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
8878 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
8879 available_hot_pixel_map_modes,
8880 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
8881
8882 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8883 gCamCapability[cameraId]->reference_illuminant1);
8884 if (NAME_NOT_FOUND != val) {
8885 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8886 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
8887 }
8888
8889 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
8890 gCamCapability[cameraId]->reference_illuminant2);
8891 if (NAME_NOT_FOUND != val) {
8892 uint8_t fwkReferenceIlluminant = (uint8_t)val;
8893 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
8894 }
8895
8896 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
8897 (void *)gCamCapability[cameraId]->forward_matrix1,
8898 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8899
8900 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
8901 (void *)gCamCapability[cameraId]->forward_matrix2,
8902 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
8903
8904 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
8905 (void *)gCamCapability[cameraId]->color_transform1,
8906 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8907
8908 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
8909 (void *)gCamCapability[cameraId]->color_transform2,
8910 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
8911
8912 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
8913 (void *)gCamCapability[cameraId]->calibration_transform1,
8914 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8915
8916 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
8917 (void *)gCamCapability[cameraId]->calibration_transform2,
8918 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
8919
8920 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
8921 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
8922 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
8923 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8924 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
8925 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
8926 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
8927 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
8928 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
8929 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
8930 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
8931 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
8932 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8933 ANDROID_JPEG_GPS_COORDINATES,
8934 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
8935 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
8936 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
8937 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8938 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
8939 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
8940 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
8941 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
8942 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
8943 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
8944#ifndef USE_HAL_3_3
8945 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8946#endif
8947        ANDROID_STATISTICS_FACE_DETECT_MODE,
8948 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8949 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
8950 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8951        ANDROID_BLACK_LEVEL_LOCK,
8952 /* DevCamDebug metadata request_keys_basic */
8953 DEVCAMDEBUG_META_ENABLE,
8954 /* DevCamDebug metadata end */
8955 };
8956
8957 size_t request_keys_cnt =
8958 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
8959 Vector<int32_t> available_request_keys;
8960 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
8961 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
8962 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
8963 }
8964
8965 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
8966 available_request_keys.array(), available_request_keys.size());
8967
8968 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
8969 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
8970 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
8971 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
8972 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
8973 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
8974 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
8975 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
8976 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
8977 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
8978 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
8979 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
8980 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
8981 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
8982 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
8983 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
8984 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
8985 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
8986 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
8987 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
8988 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
8989        ANDROID_STATISTICS_FACE_SCORES,
8990#ifndef USE_HAL_3_3
8991 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
8992#endif
8993        NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
8994        // DevCamDebug metadata result_keys_basic
8995 DEVCAMDEBUG_META_ENABLE,
8996 // DevCamDebug metadata result_keys AF
8997 DEVCAMDEBUG_AF_LENS_POSITION,
8998 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
8999 DEVCAMDEBUG_AF_TOF_DISTANCE,
9000 DEVCAMDEBUG_AF_LUMA,
9001 DEVCAMDEBUG_AF_HAF_STATE,
9002 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9003 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9004 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9005 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9006 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9007 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9008 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9009 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9010 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9011 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9012 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9013 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9014 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9015 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9016 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9017 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9018 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9019 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9020 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9021 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9022 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9023 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9024 // DevCamDebug metadata result_keys AEC
9025 DEVCAMDEBUG_AEC_TARGET_LUMA,
9026 DEVCAMDEBUG_AEC_COMP_LUMA,
9027 DEVCAMDEBUG_AEC_AVG_LUMA,
9028 DEVCAMDEBUG_AEC_CUR_LUMA,
9029 DEVCAMDEBUG_AEC_LINECOUNT,
9030 DEVCAMDEBUG_AEC_REAL_GAIN,
9031 DEVCAMDEBUG_AEC_EXP_INDEX,
9032 DEVCAMDEBUG_AEC_LUX_IDX,
9033 // DevCamDebug metadata result_keys AWB
9034 DEVCAMDEBUG_AWB_R_GAIN,
9035 DEVCAMDEBUG_AWB_G_GAIN,
9036 DEVCAMDEBUG_AWB_B_GAIN,
9037 DEVCAMDEBUG_AWB_CCT,
9038 DEVCAMDEBUG_AWB_DECISION,
9039 /* DevCamDebug metadata end */
9040        };
9041
9042     size_t result_keys_cnt =
9043 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9044
9045 Vector<int32_t> available_result_keys;
9046 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9047 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9048 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9049 }
9050 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9051 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9052 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9053 }
9054 if (supportedFaceDetectMode == 1) {
9055 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9056 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9057 } else if ((supportedFaceDetectMode == 2) ||
9058 (supportedFaceDetectMode == 3)) {
9059 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9060 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9061 }
9062#ifndef USE_HAL_3_3
9063 if (hasBlackRegions) {
9064 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9065 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9066 }
9067#endif
9068     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9069 available_result_keys.array(), available_result_keys.size());
9070
9071     int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9072        ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9073 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
9074 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
9075 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9076 ANDROID_SCALER_CROPPING_TYPE,
9077 ANDROID_SYNC_MAX_LATENCY,
9078 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9079 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9080 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9081 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
9082 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
9083 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9084 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9085 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9086 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9087 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
9088 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9089 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9090 ANDROID_LENS_FACING,
9091 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9092 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9093 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
9094 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9095 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9096 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9097 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9098 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
9099 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
9100 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
9101 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
9102 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
9103 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9104 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9105 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9106 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9107 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
9108 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9109 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9110 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9111 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
9112 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
9113 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9114 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9115 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9116 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9117 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9118 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9119 ANDROID_TONEMAP_MAX_CURVE_POINTS,
9120 ANDROID_CONTROL_AVAILABLE_MODES,
9121 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9122 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9123 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9124 ANDROID_SHADING_AVAILABLE_MODES,
9125        ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9126#ifndef USE_HAL_3_3
9127 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
9128 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9129#endif
9130 };
9131
9132 Vector<int32_t> available_characteristics_keys;
9133 available_characteristics_keys.appendArray(characteristics_keys_basic,
9134 sizeof(characteristics_keys_basic)/sizeof(int32_t));
9135#ifndef USE_HAL_3_3
9136 if (hasBlackRegions) {
9137 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
9138 }
9139#endif
9140     staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
9141                       available_characteristics_keys.array(),
9142                       available_characteristics_keys.size());
9143
9144 /*available stall durations depend on the hw + sw and will be different for different devices */
9145 /*have to add for raw after implementation*/
9146 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9147 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9148
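    // Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry appended below is a
    // (format, width, height, stall duration in ns) tuple.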
9149 Vector<int64_t> available_stall_durations;
9150 for (uint32_t j = 0; j < stall_formats_count; j++) {
9151 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9152 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9153 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9154 available_stall_durations.add(stall_formats[j]);
9155 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9156 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9157 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9158 }
9159 } else {
9160 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9161 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9162 available_stall_durations.add(stall_formats[j]);
9163 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9164 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9165 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9166 }
9167 }
9168 }
9169 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9170 available_stall_durations.array(),
9171 available_stall_durations.size());
9172
9173 //QCAMERA3_OPAQUE_RAW
9174 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9175 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9176 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9177 case LEGACY_RAW:
9178 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9179 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9180 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9181 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9182 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9183 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9184 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9185 break;
9186 case MIPI_RAW:
9187 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9188 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9189 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9190 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9191 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9192 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9193 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9194 break;
9195 default:
9196 LOGE("unknown opaque_raw_format %d",
9197 gCamCapability[cameraId]->opaque_raw_fmt);
9198 break;
9199 }
9200 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9201
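    // Each QCAMERA3_OPAQUE_RAW_STRIDES entry below is a (width, height, stride) triplet,
    // one per supported raw dimension (stride taken from the first plane's layout).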
9202 Vector<int32_t> strides;
9203 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9204 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9205 cam_stream_buf_plane_info_t buf_planes;
9206 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9207 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9208 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9209 &gCamCapability[cameraId]->padding_info, &buf_planes);
9210 strides.add(buf_planes.plane_info.mp[0].stride);
9211 }
9212 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9213 strides.size());
9214
9215     //Video HDR default
9216 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9217 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
9218 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
9219 int32_t vhdr_mode[] = {
9220 QCAMERA3_VIDEO_HDR_MODE_OFF,
9221 QCAMERA3_VIDEO_HDR_MODE_ON};
9222
9223 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9224 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9225 vhdr_mode, vhdr_mode_count);
9226 }
9227
9228     staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9229 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9230 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9231
9232 uint8_t isMonoOnly =
9233 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9234 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9235 &isMonoOnly, 1);
9236
9237#ifndef USE_HAL_3_3
9238 Vector<int32_t> opaque_size;
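    // ANDROID_SENSOR_OPAQUE_RAW_SIZE is filled with (width, height, frame length in bytes)
    // triplets, computed from the opaque raw format's plane layout.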
9239 for (size_t j = 0; j < scalar_formats_count; j++) {
9240 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9241 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9242 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9243 cam_stream_buf_plane_info_t buf_planes;
9244
9245 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9246 &gCamCapability[cameraId]->padding_info, &buf_planes);
9247
9248 if (rc == 0) {
9249 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9250 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9251 opaque_size.add(buf_planes.plane_info.frame_len);
9252                 } else {
9253 LOGE("raw frame calculation failed!");
9254 }
9255 }
9256 }
9257 }
9258
9259 if ((opaque_size.size() > 0) &&
9260 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9261 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9262 else
9263 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9264#endif
9265
9266     if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9267 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9268 size = 0;
9269 count = CAM_IR_MODE_MAX;
9270 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9271 for (size_t i = 0; i < count; i++) {
9272 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9273 gCamCapability[cameraId]->supported_ir_modes[i]);
9274 if (NAME_NOT_FOUND != val) {
9275 avail_ir_modes[size] = (int32_t)val;
9276 size++;
9277 }
9278 }
9279 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9280 avail_ir_modes, size);
9281 }
9282
9283     if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9284 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9285 size = 0;
9286 count = CAM_AEC_CONVERGENCE_MAX;
9287 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9288 for (size_t i = 0; i < count; i++) {
9289 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9290 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9291 if (NAME_NOT_FOUND != val) {
9292 available_instant_aec_modes[size] = (int32_t)val;
9293 size++;
9294 }
9295 }
9296 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9297 available_instant_aec_modes, size);
9298 }
9299
9300     gStaticMetadata[cameraId] = staticInfo.release();
9301 return rc;
9302}
9303
9304/*===========================================================================
9305 * FUNCTION : makeTable
9306 *
9307 * DESCRIPTION: make a table of sizes
9308 *
9309 * PARAMETERS :
9310 *
9311 *
9312 *==========================================================================*/
9313void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9314 size_t max_size, int32_t *sizeTable)
9315{
9316 size_t j = 0;
9317 if (size > max_size) {
9318 size = max_size;
9319 }
9320 for (size_t i = 0; i < size; i++) {
9321 sizeTable[j] = dimTable[i].width;
9322 sizeTable[j+1] = dimTable[i].height;
9323 j+=2;
9324 }
9325}
9326
9327/*===========================================================================
9328 * FUNCTION : makeFPSTable
9329 *
9330 * DESCRIPTION: make a table of fps ranges
9331 *
9332 * PARAMETERS :
9333 *
9334 *==========================================================================*/
9335void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9336 size_t max_size, int32_t *fpsRangesTable)
9337{
9338 size_t j = 0;
9339 if (size > max_size) {
9340 size = max_size;
9341 }
9342 for (size_t i = 0; i < size; i++) {
9343 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9344 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9345 j+=2;
9346 }
9347}
9348
9349/*===========================================================================
9350 * FUNCTION : makeOverridesList
9351 *
9352 * DESCRIPTION: make a list of scene mode overrides
9353 *
9354 * PARAMETERS :
9355 *
9356 *
9357 *==========================================================================*/
9358void QCamera3HardwareInterface::makeOverridesList(
9359 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
9360 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
9361{
9362 /*daemon will give a list of overrides for all scene modes.
9363 However we should send the fwk only the overrides for the scene modes
9364 supported by the framework*/
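    // Each scene mode contributes a (AE mode, AWB mode, AF mode) triplet to overridesList,
    // hence the stride of 3 below.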
9365 size_t j = 0;
9366 if (size > max_size) {
9367 size = max_size;
9368 }
9369 size_t focus_count = CAM_FOCUS_MODE_MAX;
9370 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
9371 focus_count);
9372 for (size_t i = 0; i < size; i++) {
9373 bool supt = false;
9374 size_t index = supported_indexes[i];
9375 overridesList[j] = gCamCapability[camera_id]->flash_available ?
9376 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
9377 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9378 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9379 overridesTable[index].awb_mode);
9380 if (NAME_NOT_FOUND != val) {
9381 overridesList[j+1] = (uint8_t)val;
9382 }
9383 uint8_t focus_override = overridesTable[index].af_mode;
9384 for (size_t k = 0; k < focus_count; k++) {
9385 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
9386 supt = true;
9387 break;
9388 }
9389 }
9390 if (supt) {
9391 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9392 focus_override);
9393 if (NAME_NOT_FOUND != val) {
9394 overridesList[j+2] = (uint8_t)val;
9395 }
9396 } else {
9397 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
9398 }
9399 j+=3;
9400 }
9401}
9402
9403/*===========================================================================
9404 * FUNCTION : filterJpegSizes
9405 *
9406  * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that the
9407  *              active array dimensions can be downscaled to within the given downscale factor
9408 *
9409 * PARAMETERS :
9410 *
9411 * RETURN : length of jpegSizes array
9412 *==========================================================================*/
9413
9414size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9415 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9416 uint8_t downscale_factor)
9417{
9418 if (0 == downscale_factor) {
9419 downscale_factor = 1;
9420 }
9421
9422 int32_t min_width = active_array_size.width / downscale_factor;
9423 int32_t min_height = active_array_size.height / downscale_factor;
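    // Only processed sizes at least as large as active_array / downscale_factor in both
    // dimensions are kept; smaller ones would exceed the supported downscale factor.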
9424 size_t jpegSizesCnt = 0;
9425 if (processedSizesCnt > maxCount) {
9426 processedSizesCnt = maxCount;
9427 }
9428 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9429 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9430 jpegSizes[jpegSizesCnt] = processedSizes[i];
9431 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9432 jpegSizesCnt += 2;
9433 }
9434 }
9435 return jpegSizesCnt;
9436}
9437
9438/*===========================================================================
9439 * FUNCTION : computeNoiseModelEntryS
9440 *
9441 * DESCRIPTION: function to map a given sensitivity to the S noise
9442 * model parameters in the DNG noise model.
9443 *
9444 * PARAMETERS : sens : the sensor sensitivity
9445 *
9446  * RETURN    : S (sensor amplification) noise
9447 *
9448 *==========================================================================*/
9449double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
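    // DNG noise model scale term: S(sens) = gradient_S * sens + offset_S, clamped to >= 0.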
9450 double s = gCamCapability[mCameraId]->gradient_S * sens +
9451 gCamCapability[mCameraId]->offset_S;
9452 return ((s < 0.0) ? 0.0 : s);
9453}
9454
9455/*===========================================================================
9456 * FUNCTION : computeNoiseModelEntryO
9457 *
9458 * DESCRIPTION: function to map a given sensitivity to the O noise
9459 * model parameters in the DNG noise model.
9460 *
9461 * PARAMETERS : sens : the sensor sensitivity
9462 *
9463  * RETURN    : O (sensor readout) noise
9464 *
9465 *==========================================================================*/
9466double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
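    // DNG noise model offset term: O(sens) = gradient_O * sens^2 + offset_O * digital_gain^2,
    // where digital_gain = max(1.0, sens / max_analog_sensitivity); clamped to >= 0.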
9467 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9468 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9469 1.0 : (1.0 * sens / max_analog_sens);
9470 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9471 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9472 return ((o < 0.0) ? 0.0 : o);
9473}
9474
9475/*===========================================================================
9476 * FUNCTION : getSensorSensitivity
9477 *
9478 * DESCRIPTION: convert iso_mode to an integer value
9479 *
9480 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9481 *
9482  * RETURN    : sensitivity supported by sensor
9483 *
9484 *==========================================================================*/
9485int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9486{
9487 int32_t sensitivity;
9488
9489 switch (iso_mode) {
9490 case CAM_ISO_MODE_100:
9491 sensitivity = 100;
9492 break;
9493 case CAM_ISO_MODE_200:
9494 sensitivity = 200;
9495 break;
9496 case CAM_ISO_MODE_400:
9497 sensitivity = 400;
9498 break;
9499 case CAM_ISO_MODE_800:
9500 sensitivity = 800;
9501 break;
9502 case CAM_ISO_MODE_1600:
9503 sensitivity = 1600;
9504 break;
9505 default:
9506 sensitivity = -1;
9507 break;
9508 }
9509 return sensitivity;
9510}
9511
9512/*===========================================================================
9513 * FUNCTION : getCamInfo
9514 *
9515 * DESCRIPTION: query camera capabilities
9516 *
9517 * PARAMETERS :
9518 * @cameraId : camera Id
9519 * @info : camera info struct to be filled in with camera capabilities
9520 *
9521 * RETURN : int type of status
9522 * NO_ERROR -- success
9523  *              non-zero failure code
9524 *==========================================================================*/
9525int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
9526 struct camera_info *info)
9527{
9528     ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
9529     int rc = 0;
9530
9531 pthread_mutex_lock(&gCamLock);
9532 if (NULL == gCamCapability[cameraId]) {
9533 rc = initCapabilities(cameraId);
9534 if (rc < 0) {
9535 pthread_mutex_unlock(&gCamLock);
9536 return rc;
9537 }
9538 }
9539
9540 if (NULL == gStaticMetadata[cameraId]) {
9541 rc = initStaticMetadata(cameraId);
9542 if (rc < 0) {
9543 pthread_mutex_unlock(&gCamLock);
9544 return rc;
9545 }
9546 }
9547
9548 switch(gCamCapability[cameraId]->position) {
9549 case CAM_POSITION_BACK:
9550 case CAM_POSITION_BACK_AUX:
9551 info->facing = CAMERA_FACING_BACK;
9552 break;
9553
9554 case CAM_POSITION_FRONT:
9555 case CAM_POSITION_FRONT_AUX:
9556 info->facing = CAMERA_FACING_FRONT;
9557 break;
9558
9559 default:
9560 LOGE("Unknown position type %d for camera id:%d",
9561 gCamCapability[cameraId]->position, cameraId);
9562 rc = -1;
9563 break;
9564 }
9565
9566
9567 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
9568#ifndef USE_HAL_3_3
9569     info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
9570#else
9571     info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
9572#endif
9573     info->static_camera_characteristics = gStaticMetadata[cameraId];
9574
9575 //For now assume both cameras can operate independently.
9576 info->conflicting_devices = NULL;
9577 info->conflicting_devices_length = 0;
9578
9579 //resource cost is 100 * MIN(1.0, m/M),
9580 //where m is throughput requirement with maximum stream configuration
9581 //and M is CPP maximum throughput.
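    // Illustrative example (assumed numbers only): three 12 MP processed streams at 30 fps
    // against a 1080 MP/s CPP limit would give ratio = 1.0 and a resource cost of 100.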
9582 float max_fps = 0.0;
9583 for (uint32_t i = 0;
9584 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
9585 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
9586 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
9587 }
9588 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
9589 gCamCapability[cameraId]->active_array_size.width *
9590 gCamCapability[cameraId]->active_array_size.height * max_fps /
9591 gCamCapability[cameraId]->max_pixel_bandwidth;
9592 info->resource_cost = 100 * MIN(1.0, ratio);
9593 LOGI("camera %d resource cost is %d", cameraId,
9594 info->resource_cost);
9595
9596 pthread_mutex_unlock(&gCamLock);
9597 return rc;
9598}
9599
9600/*===========================================================================
9601 * FUNCTION : translateCapabilityToMetadata
9602 *
9603 * DESCRIPTION: translate the capability into camera_metadata_t
9604 *
9605 * PARAMETERS : type of the request
9606 *
9607 *
9608 * RETURN : success: camera_metadata_t*
9609 * failure: NULL
9610 *
9611 *==========================================================================*/
9612camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9613{
9614 if (mDefaultMetadata[type] != NULL) {
9615 return mDefaultMetadata[type];
9616 }
9617 //first time we are handling this request
9618 //fill up the metadata structure using the wrapper class
9619 CameraMetadata settings;
9620 //translate from cam_capability_t to camera_metadata_tag_t
9621 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9622 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9623 int32_t defaultRequestID = 0;
9624 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9625
9626 /* OIS disable */
9627 char ois_prop[PROPERTY_VALUE_MAX];
9628 memset(ois_prop, 0, sizeof(ois_prop));
9629 property_get("persist.camera.ois.disable", ois_prop, "0");
9630 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9631
9632 /* Force video to use OIS */
9633 char videoOisProp[PROPERTY_VALUE_MAX];
9634 memset(videoOisProp, 0, sizeof(videoOisProp));
9635 property_get("persist.camera.ois.video", videoOisProp, "1");
9636 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
9637
9638 // Hybrid AE enable/disable
9639 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9640 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9641 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9642 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9643
9644     uint8_t controlIntent = 0;
9645 uint8_t focusMode;
9646 uint8_t vsMode;
9647 uint8_t optStabMode;
9648 uint8_t cacMode;
9649 uint8_t edge_mode;
9650 uint8_t noise_red_mode;
9651 uint8_t tonemap_mode;
9652 bool highQualityModeEntryAvailable = FALSE;
9653 bool fastModeEntryAvailable = FALSE;
9654 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9655 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9656 switch (type) {
9657 case CAMERA3_TEMPLATE_PREVIEW:
9658 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9659 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9660 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9661 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9662 edge_mode = ANDROID_EDGE_MODE_FAST;
9663 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9664 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9665 break;
9666 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9667 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9668 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9669 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9670 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9671 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9672 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9673 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9674 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9675 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9676 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9677 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9678 highQualityModeEntryAvailable = TRUE;
9679 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9680 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9681 fastModeEntryAvailable = TRUE;
9682 }
9683 }
9684 if (highQualityModeEntryAvailable) {
9685 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9686 } else if (fastModeEntryAvailable) {
9687 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9688 }
9689 break;
9690 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9691 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9692 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9693 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009694 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9695 edge_mode = ANDROID_EDGE_MODE_FAST;
9696 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9697 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9698 if (forceVideoOis)
9699 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9700 break;
9701 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9702 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9703 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9704 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009705 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9706 edge_mode = ANDROID_EDGE_MODE_FAST;
9707 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9708 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9709 if (forceVideoOis)
9710 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9711 break;
9712 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9713 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9714 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9715 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9716 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9717 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9718 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9719 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9720 break;
9721 case CAMERA3_TEMPLATE_MANUAL:
9722 edge_mode = ANDROID_EDGE_MODE_FAST;
9723 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9724 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9725 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9726 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9727 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9728 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9729 break;
9730 default:
9731 edge_mode = ANDROID_EDGE_MODE_FAST;
9732 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9733 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9734 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9735 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9736 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9737 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9738 break;
9739 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009740 // Set CAC to OFF if the underlying device doesn't support it
9741 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9742 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9743 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009744 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9745 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9746 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9747 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9748 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9749 }
9750 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9751
9752 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9753 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9754 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9755 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9756 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9757 || ois_disable)
9758 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9759 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9760
9761 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9762 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9763
9764 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9765 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9766
9767 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9768 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9769
9770 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9771 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9772
9773 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9774 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9775
9776 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9777 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9778
9779 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9780 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9781
9782 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9783 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9784
9785 /*flash*/
9786 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9787 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9788
9789 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9790 settings.update(ANDROID_FLASH_FIRING_POWER,
9791 &flashFiringLevel, 1);
9792
9793 /* lens */
9794 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9795 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9796
9797 if (gCamCapability[mCameraId]->filter_densities_count) {
9798 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9799 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9800 gCamCapability[mCameraId]->filter_densities_count);
9801 }
9802
9803 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9804 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9805
9806 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9807 float default_focus_distance = 0;
9808 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9809 }
9810
9811 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9812 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9813
9814 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9815 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9816
9817 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9818 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9819
9820 /* face detection (default to OFF) */
9821 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9822 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9823
9824 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9825 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9826
9827 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9828 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9829
9830 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9831 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9832
9833 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9834 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9835
9836 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9837 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9838
9839 /* Exposure time(Update the Min Exposure Time)*/
9840 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9841 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9842
9843 /* frame duration */
9844 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9845 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9846
9847 /* sensitivity */
9848 static const int32_t default_sensitivity = 100;
9849 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009850#ifndef USE_HAL_3_3
9851 static const int32_t default_isp_sensitivity =
9852 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9853 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9854#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009855
9856 /*edge mode*/
9857 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9858
9859 /*noise reduction mode*/
9860 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9861
9862 /*color correction mode*/
9863 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9864 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9865
9866 /*transform matrix mode*/
9867 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9868
9869 int32_t scaler_crop_region[4];
9870 scaler_crop_region[0] = 0;
9871 scaler_crop_region[1] = 0;
9872 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9873 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
9874 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
9875
9876 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
9877 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
9878
9879 /*focus distance*/
9880 float focus_distance = 0.0;
9881 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
9882
9883 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009884 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -07009885 float max_range = 0.0;
9886 float max_fixed_fps = 0.0;
9887 int32_t fps_range[2] = {0, 0};
9888 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
9889 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08009890 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
9891 TEMPLATE_MAX_PREVIEW_FPS) {
9892 continue;
9893 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009894 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
9895 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9896 if (type == CAMERA3_TEMPLATE_PREVIEW ||
9897 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
9898 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
9899 if (range > max_range) {
9900 fps_range[0] =
9901 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9902 fps_range[1] =
9903 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9904 max_range = range;
9905 }
9906 } else {
9907 if (range < 0.01 && max_fixed_fps <
9908 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
9909 fps_range[0] =
9910 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
9911 fps_range[1] =
9912 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9913 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
9914 }
9915 }
9916 }
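    /* Worked example (hypothetical capability table, for illustration only):
     *   fps_ranges_tbl = { [15,30], [30,30], [7.5,30], [60,60] }
     *   [60,60] is skipped by the TEMPLATE_MAX_PREVIEW_FPS (30) cap above.
     *   PREVIEW / STILL_CAPTURE / ZSL templates -> widest span       -> [7, 30]
     *   all other templates                     -> highest fixed fps -> [30, 30]
     */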
9917 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
9918
9919 /*precapture trigger*/
9920 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
9921 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
9922
9923 /*af trigger*/
9924 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
9925 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
9926
9927 /* ae & af regions */
9928 int32_t active_region[] = {
9929 gCamCapability[mCameraId]->active_array_size.left,
9930 gCamCapability[mCameraId]->active_array_size.top,
9931 gCamCapability[mCameraId]->active_array_size.left +
9932 gCamCapability[mCameraId]->active_array_size.width,
9933 gCamCapability[mCameraId]->active_array_size.top +
9934 gCamCapability[mCameraId]->active_array_size.height,
9935 0};
9936 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
9937 sizeof(active_region) / sizeof(active_region[0]));
9938 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
9939 sizeof(active_region) / sizeof(active_region[0]));
9940
9941 /* black level lock */
9942 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9943 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
9944
9945 /* lens shading map mode */
9946 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9947 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
9948 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
9949 }
9950 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
9951
9952 //special defaults for manual template
9953 if (type == CAMERA3_TEMPLATE_MANUAL) {
9954 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
9955 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
9956
9957 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
9958 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
9959
9960 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
9961 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
9962
9963 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
9964 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
9965
9966 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
9967 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
9968
9969 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
9970 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
9971 }
9972
9973
9974 /* TNR
9975 * This is where we decide for which templates TNR will be enabled.
9976 * TNR is turned on if either the preview or the video stream requires it.
9977 * This is not to be confused with per-stream linking; that decision is still
9978 * made per session and is handled as part of stream configuration.
9979 */
9980 uint8_t tnr_enable = 0;
9981
9982 if (m_bTnrPreview || m_bTnrVideo) {
9983
9984 switch (type) {
9985 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9986 tnr_enable = 1;
9987 break;
9988
9989 default:
9990 tnr_enable = 0;
9991 break;
9992 }
9993
9994 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
9995 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
9996 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
9997
9998 LOGD("TNR:%d with process plate %d for template:%d",
9999 tnr_enable, tnr_process_type, type);
10000 }
10001
10002 //Update Link tags to default
10003 int32_t sync_type = CAM_TYPE_STANDALONE;
10004 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10005
10006 int32_t is_main = 0; // this doesn't matter as the app should overwrite it
10007 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10008
10009 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10010
10011 /* CDS default */
10012 char prop[PROPERTY_VALUE_MAX];
10013 memset(prop, 0, sizeof(prop));
10014 property_get("persist.camera.CDS", prop, "Auto");
10015 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10016 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10017 if (CAM_CDS_MODE_MAX == cds_mode) {
10018 cds_mode = CAM_CDS_MODE_AUTO;
10019 }
10020
10021 /* Disable CDS in templates that have TNR enabled */
10022 if (tnr_enable)
10023 cds_mode = CAM_CDS_MODE_OFF;
10024
10025 int32_t mode = cds_mode;
10026 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
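    /* For reference (assumed CDS_MAP strings, illustrative only): persist.camera.CDS
     * typically accepts "Off", "On" or "Auto", e.g.:
     *   adb shell setprop persist.camera.CDS Off
     * Values that don't map fall back to CAM_CDS_MODE_AUTO (see the CAM_CDS_MODE_MAX
     * check above), and CDS is forced off whenever TNR is enabled for the template.
     */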
Thierry Strudel04e026f2016-10-10 11:27:36 -070010027
10028 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
10029 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
10030
10031 /* IR Mode Default Off */
10032 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
10033 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
10034
Thierry Strudel269c81a2016-10-12 12:13:59 -070010035 /* Manual Convergence AEC Speed is disabled by default*/
10036 float default_aec_speed = 0;
10037 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10038
10039 /* Manual Convergence AWB Speed is disabled by default*/
10040 float default_awb_speed = 0;
10041 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10042
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010043 // Set instant AEC to normal convergence by default
10044 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10045 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10046
Shuzhen Wang19463d72016-03-08 11:09:52 -080010047 /* hybrid ae */
10048 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10049
Thierry Strudel3d639192016-09-09 11:52:26 -070010050 mDefaultMetadata[type] = settings.release();
10051
10052 return mDefaultMetadata[type];
10053}
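/* Usage sketch (assumed caller path, for illustration only): the framework's
 * camera3_device_ops::construct_default_request_settings() entry point is expected
 * to end up here, e.g.
 *   camera_metadata_t *meta = hw->translateCapabilityToMetadata(CAMERA3_TEMPLATE_PREVIEW);
 * The template is built once and then served from mDefaultMetadata[type].
 */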
10054
10055/*===========================================================================
10056 * FUNCTION : setFrameParameters
10057 *
10058 * DESCRIPTION: set parameters per frame as requested in the metadata from
10059 * framework
10060 *
10061 * PARAMETERS :
10062 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010063 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010064 * @blob_request: Whether this request is a blob request or not
10065 *
10066 * RETURN : success: NO_ERROR
10067 * failure:
10068 *==========================================================================*/
10069int QCamera3HardwareInterface::setFrameParameters(
10070 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010071 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010072 int blob_request,
10073 uint32_t snapshotStreamId)
10074{
10075 /*translate from camera_metadata_t type to parm_type_t*/
10076 int rc = 0;
10077 int32_t hal_version = CAM_HAL_V3;
10078
10079 clear_metadata_buffer(mParameters);
10080 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10081 LOGE("Failed to set hal version in the parameters");
10082 return BAD_VALUE;
10083 }
10084
10085 /*we need to update the frame number in the parameters*/
10086 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10087 request->frame_number)) {
10088 LOGE("Failed to set the frame number in the parameters");
10089 return BAD_VALUE;
10090 }
10091
10092 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010093 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010094 LOGE("Failed to set stream type mask in the parameters");
10095 return BAD_VALUE;
10096 }
10097
10098 if (mUpdateDebugLevel) {
10099 uint32_t dummyDebugLevel = 0;
10100 /* The value of dummyDebugLevel is irrelevant. On
10101 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
10102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10103 dummyDebugLevel)) {
10104 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10105 return BAD_VALUE;
10106 }
10107 mUpdateDebugLevel = false;
10108 }
10109
10110 if(request->settings != NULL){
10111 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10112 if (blob_request)
10113 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10114 }
10115
10116 return rc;
10117}
10118
10119/*===========================================================================
10120 * FUNCTION : setReprocParameters
10121 *
10122 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
10123 * return it.
10124 *
10125 * PARAMETERS :
10126 * @request : request that needs to be serviced
10127 *
10128 * RETURN : success: NO_ERROR
10129 * failure:
10130 *==========================================================================*/
10131int32_t QCamera3HardwareInterface::setReprocParameters(
10132 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10133 uint32_t snapshotStreamId)
10134{
10135 /*translate from camera_metadata_t type to parm_type_t*/
10136 int rc = 0;
10137
10138 if (NULL == request->settings){
10139 LOGE("Reprocess settings cannot be NULL");
10140 return BAD_VALUE;
10141 }
10142
10143 if (NULL == reprocParam) {
10144 LOGE("Invalid reprocessing metadata buffer");
10145 return BAD_VALUE;
10146 }
10147 clear_metadata_buffer(reprocParam);
10148
10149 /*we need to update the frame number in the parameters*/
10150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10151 request->frame_number)) {
10152 LOGE("Failed to set the frame number in the parameters");
10153 return BAD_VALUE;
10154 }
10155
10156 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10157 if (rc < 0) {
10158 LOGE("Failed to translate reproc request");
10159 return rc;
10160 }
10161
10162 CameraMetadata frame_settings;
10163 frame_settings = request->settings;
10164 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10165 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10166 int32_t *crop_count =
10167 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10168 int32_t *crop_data =
10169 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10170 int32_t *roi_map =
10171 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10172 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10173 cam_crop_data_t crop_meta;
10174 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10175 crop_meta.num_of_streams = 1;
10176 crop_meta.crop_info[0].crop.left = crop_data[0];
10177 crop_meta.crop_info[0].crop.top = crop_data[1];
10178 crop_meta.crop_info[0].crop.width = crop_data[2];
10179 crop_meta.crop_info[0].crop.height = crop_data[3];
10180
10181 crop_meta.crop_info[0].roi_map.left =
10182 roi_map[0];
10183 crop_meta.crop_info[0].roi_map.top =
10184 roi_map[1];
10185 crop_meta.crop_info[0].roi_map.width =
10186 roi_map[2];
10187 crop_meta.crop_info[0].roi_map.height =
10188 roi_map[3];
10189
10190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10191 rc = BAD_VALUE;
10192 }
10193 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10194 request->input_buffer->stream,
10195 crop_meta.crop_info[0].crop.left,
10196 crop_meta.crop_info[0].crop.top,
10197 crop_meta.crop_info[0].crop.width,
10198 crop_meta.crop_info[0].crop.height);
10199 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10200 request->input_buffer->stream,
10201 crop_meta.crop_info[0].roi_map.left,
10202 crop_meta.crop_info[0].roi_map.top,
10203 crop_meta.crop_info[0].roi_map.width,
10204 crop_meta.crop_info[0].roi_map.height);
10205 } else {
10206 LOGE("Invalid reprocess crop count %d!", *crop_count);
10207 }
10208 } else {
10209 LOGE("No crop data from matching output stream");
10210 }
10211
10212 /* These settings are not needed for regular requests, so handle them specially for
10213 reprocess requests; they carry information needed for EXIF tags */
10214 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10215 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10216 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10217 if (NAME_NOT_FOUND != val) {
10218 uint32_t flashMode = (uint32_t)val;
10219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10220 rc = BAD_VALUE;
10221 }
10222 } else {
10223 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10224 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10225 }
10226 } else {
10227 LOGH("No flash mode in reprocess settings");
10228 }
10229
10230 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10231 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10233 rc = BAD_VALUE;
10234 }
10235 } else {
10236 LOGH("No flash state in reprocess settings");
10237 }
10238
10239 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10240 uint8_t *reprocessFlags =
10241 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10243 *reprocessFlags)) {
10244 rc = BAD_VALUE;
10245 }
10246 }
10247
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010248 // Add metadata which reprocess needs
10249 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10250 cam_reprocess_info_t *repro_info =
10251 (cam_reprocess_info_t *)frame_settings.find
10252 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010253 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010254 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010255 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010256 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010257 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010258 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010259 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010260 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010261 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010262 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010263 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010264 repro_info->pipeline_flip);
10265 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10266 repro_info->af_roi);
10267 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10268 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010269 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
10270 CAM_INTF_PARM_ROTATION metadata has already been added in
10271 translateToHalMetadata, and the HAL needs to keep this new rotation
10272 metadata. Otherwise, the old rotation info saved in the vendor tag
10273 would be used */
10274 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10275 CAM_INTF_PARM_ROTATION, reprocParam) {
10276 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10277 } else {
10278 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010279 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010280 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010281 }
10282
10283 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
10284 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
10285 roi.width and roi.height become the final JPEG size.
10286 For now, the HAL only checks this for reprocess requests */
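    /* Illustrative example (hypothetical values): a reprocess request asking for a
     * 2000x1500 crop encoded into a 1600x1200 JPEG would carry
     *   QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1
     *   QCAMERA3_JPEG_ENCODE_CROP_RECT   = {left, top, width, height} = {328, 246, 2000, 1500}
     *   QCAMERA3_JPEG_ENCODE_CROP_ROI    = {0, 0, 1600, 1200}  // roi[2]/roi[3] become scale_dim
     */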
10287 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10288 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10289 uint8_t *enable =
10290 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10291 if (*enable == TRUE) {
10292 int32_t *crop_data =
10293 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10294 cam_stream_crop_info_t crop_meta;
10295 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10296 crop_meta.stream_id = 0;
10297 crop_meta.crop.left = crop_data[0];
10298 crop_meta.crop.top = crop_data[1];
10299 crop_meta.crop.width = crop_data[2];
10300 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010301 // The JPEG crop roi should match cpp output size
10302 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10303 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10304 crop_meta.roi_map.left = 0;
10305 crop_meta.roi_map.top = 0;
10306 crop_meta.roi_map.width = cpp_crop->crop.width;
10307 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010308 }
10309 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10310 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010311 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010312 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010313 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10314 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010315 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010316 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10317
10318 // Add JPEG scale information
10319 cam_dimension_t scale_dim;
10320 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10321 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10322 int32_t *roi =
10323 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10324 scale_dim.width = roi[2];
10325 scale_dim.height = roi[3];
10326 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10327 scale_dim);
10328 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10329 scale_dim.width, scale_dim.height, mCameraId);
10330 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010331 }
10332 }
10333
10334 return rc;
10335}
10336
10337/*===========================================================================
10338 * FUNCTION : saveRequestSettings
10339 *
10340 * DESCRIPTION: Add any settings that might have changed to the request settings
10341 * and save the settings to be applied on the frame
10342 *
10343 * PARAMETERS :
10344 * @jpegMetadata : the extracted and/or modified jpeg metadata
10345 * @request : request with initial settings
10346 *
10347 * RETURN :
10348 * camera_metadata_t* : pointer to the saved request settings
10349 *==========================================================================*/
10350camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10351 const CameraMetadata &jpegMetadata,
10352 camera3_capture_request_t *request)
10353{
10354 camera_metadata_t *resultMetadata;
10355 CameraMetadata camMetadata;
10356 camMetadata = request->settings;
10357
10358 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10359 int32_t thumbnail_size[2];
10360 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10361 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10362 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10363 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10364 }
10365
10366 if (request->input_buffer != NULL) {
10367 uint8_t reprocessFlags = 1;
10368 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10369 (uint8_t*)&reprocessFlags,
10370 sizeof(reprocessFlags));
10371 }
10372
10373 resultMetadata = camMetadata.release();
10374 return resultMetadata;
10375}
10376
10377/*===========================================================================
10378 * FUNCTION : setHalFpsRange
10379 *
10380 * DESCRIPTION: set FPS range parameter
10381 *
10382 *
10383 * PARAMETERS :
10384 * @settings : Metadata from framework
10385 * @hal_metadata: Metadata buffer
10386 *
10387 *
10388 * RETURN : success: NO_ERROR
10389 * failure:
10390 *==========================================================================*/
10391int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10392 metadata_buffer_t *hal_metadata)
10393{
10394 int32_t rc = NO_ERROR;
10395 cam_fps_range_t fps_range;
10396 fps_range.min_fps = (float)
10397 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10398 fps_range.max_fps = (float)
10399 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10400 fps_range.video_min_fps = fps_range.min_fps;
10401 fps_range.video_max_fps = fps_range.max_fps;
10402
10403 LOGD("aeTargetFpsRange fps: [%f %f]",
10404 fps_range.min_fps, fps_range.max_fps);
10405 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10406 * follows:
10407 * ---------------------------------------------------------------|
10408 * Video stream is absent in configure_streams |
10409 * (Camcorder preview before the first video record) |
10410 * ---------------------------------------------------------------|
10411 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10412 * | | | vid_min/max_fps|
10413 * ---------------------------------------------------------------|
10414 * NO | [ 30, 240] | 240 | [240, 240] |
10415 * |-------------|-------------|----------------|
10416 * | [240, 240] | 240 | [240, 240] |
10417 * ---------------------------------------------------------------|
10418 * Video stream is present in configure_streams |
10419 * ---------------------------------------------------------------|
10420 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10421 * | | | vid_min/max_fps|
10422 * ---------------------------------------------------------------|
10423 * NO | [ 30, 240] | 240 | [240, 240] |
10424 * (camcorder prev |-------------|-------------|----------------|
10425 * after video rec | [240, 240] | 240 | [240, 240] |
10426 * is stopped) | | | |
10427 * ---------------------------------------------------------------|
10428 * YES | [ 30, 240] | 240 | [240, 240] |
10429 * |-------------|-------------|----------------|
10430 * | [240, 240] | 240 | [240, 240] |
10431 * ---------------------------------------------------------------|
10432 * When Video stream is absent in configure_streams,
10433 * preview fps = sensor_fps / batchsize
10434 * Eg: for 240fps at batchSize 4, preview = 60fps
10435 * for 120fps at batchSize 4, preview = 30fps
10436 *
10437 * When video stream is present in configure_streams, preview fps is as per
10438 * the ratio of preview buffers to video buffers requested in process
10439 * capture request
10440 */
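    /* Example (illustrative): in constrained high-speed mode with
     * aeTargetFpsRange = [240, 240]:
     *   fps_range.min_fps = video_min_fps = 240
     *   mHFRVideoFps = 240, mBatchSize = 240 / PREVIEW_FPS_FOR_HFR(30) = 8,
     *   which equals MAX_HFR_BATCH_SIZE, so no further clamping is applied.
     */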
10441 mBatchSize = 0;
10442 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10443 fps_range.min_fps = fps_range.video_max_fps;
10444 fps_range.video_min_fps = fps_range.video_max_fps;
10445 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10446 fps_range.max_fps);
10447 if (NAME_NOT_FOUND != val) {
10448 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10449 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10450 return BAD_VALUE;
10451 }
10452
10453 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10454 /* If batchmode is currently in progress and the fps changes,
10455 * set the flag to restart the sensor */
10456 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10457 (mHFRVideoFps != fps_range.max_fps)) {
10458 mNeedSensorRestart = true;
10459 }
10460 mHFRVideoFps = fps_range.max_fps;
10461 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10462 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10463 mBatchSize = MAX_HFR_BATCH_SIZE;
10464 }
10465 }
10466 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10467
10468 }
10469 } else {
10470 /* HFR mode is session param in backend/ISP. This should be reset when
10471 * in non-HFR mode */
10472 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10474 return BAD_VALUE;
10475 }
10476 }
10477 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10478 return BAD_VALUE;
10479 }
10480 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10481 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10482 return rc;
10483}
10484
10485/*===========================================================================
10486 * FUNCTION : translateToHalMetadata
10487 *
10488 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10489 *
10490 *
10491 * PARAMETERS :
10492 * @request : request sent from framework
10493 *
10494 *
10495 * RETURN : success: NO_ERROR
10496 * failure:
10497 *==========================================================================*/
10498int QCamera3HardwareInterface::translateToHalMetadata
10499 (const camera3_capture_request_t *request,
10500 metadata_buffer_t *hal_metadata,
10501 uint32_t snapshotStreamId)
10502{
10503 int rc = 0;
10504 CameraMetadata frame_settings;
10505 frame_settings = request->settings;
10506
10507 /* Do not change the order of the following list unless you know what you are
10508 * doing.
10509 * The order is laid out in such a way that parameters in the front of the table
10510 * may be used to override the parameters later in the table. Examples are:
10511 * 1. META_MODE should precede AEC/AWB/AF MODE
10512 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10513 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10514 * 4. Any mode should precede its corresponding settings
10515 */
10516 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10517 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10519 rc = BAD_VALUE;
10520 }
10521 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10522 if (rc != NO_ERROR) {
10523 LOGE("extractSceneMode failed");
10524 }
10525 }
10526
10527 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10528 uint8_t fwk_aeMode =
10529 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10530 uint8_t aeMode;
10531 int32_t redeye;
10532
10533 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10534 aeMode = CAM_AE_MODE_OFF;
10535 } else {
10536 aeMode = CAM_AE_MODE_ON;
10537 }
10538 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10539 redeye = 1;
10540 } else {
10541 redeye = 0;
10542 }
10543
10544 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10545 fwk_aeMode);
10546 if (NAME_NOT_FOUND != val) {
10547 int32_t flashMode = (int32_t)val;
10548 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10549 }
10550
10551 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10553 rc = BAD_VALUE;
10554 }
10555 }
10556
10557 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10558 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10559 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10560 fwk_whiteLevel);
10561 if (NAME_NOT_FOUND != val) {
10562 uint8_t whiteLevel = (uint8_t)val;
10563 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10564 rc = BAD_VALUE;
10565 }
10566 }
10567 }
10568
10569 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10570 uint8_t fwk_cacMode =
10571 frame_settings.find(
10572 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10573 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10574 fwk_cacMode);
10575 if (NAME_NOT_FOUND != val) {
10576 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10577 bool entryAvailable = FALSE;
10578 // Check whether the framework-set CAC mode is supported by the device
10579 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10580 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10581 entryAvailable = TRUE;
10582 break;
10583 }
10584 }
10585 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10586 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
10587 // Only HW ISP CAC + NO SW CAC : Advertise all 3, with High doing the same as Fast in the ISP
10588 // NO HW ISP CAC + Only SW CAC : Advertise all 3, with Fast doing the same as OFF
10589 if (entryAvailable == FALSE) {
10590 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10591 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10592 } else {
10593 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10594 // High is not supported, so set FAST since the spec says the underlying
10595 // device implementation can be the same for both modes.
10596 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10597 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10598 // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
10599 // to avoid the fps drop caused by high quality
10600 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10601 } else {
10602 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10603 }
10604 }
10605 }
10606 LOGD("Final cacMode is %d", cacMode);
10607 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10608 rc = BAD_VALUE;
10609 }
10610 } else {
10611 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10612 }
10613 }
10614
10615 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10616 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10617 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10618 fwk_focusMode);
10619 if (NAME_NOT_FOUND != val) {
10620 uint8_t focusMode = (uint8_t)val;
10621 LOGD("set focus mode %d", focusMode);
10622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10623 rc = BAD_VALUE;
10624 }
10625 }
10626 }
10627
10628 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10629 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10631 focalDistance)) {
10632 rc = BAD_VALUE;
10633 }
10634 }
10635
10636 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10637 uint8_t fwk_antibandingMode =
10638 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10639 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10640 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10641 if (NAME_NOT_FOUND != val) {
10642 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010643 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10644 if (m60HzZone) {
10645 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10646 } else {
10647 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10648 }
10649 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10651 hal_antibandingMode)) {
10652 rc = BAD_VALUE;
10653 }
10654 }
10655 }
10656
10657 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10658 int32_t expCompensation = frame_settings.find(
10659 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10660 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10661 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10662 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10663 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010664 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010665 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10666 expCompensation)) {
10667 rc = BAD_VALUE;
10668 }
10669 }
10670
10671 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10672 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10674 rc = BAD_VALUE;
10675 }
10676 }
10677 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10678 rc = setHalFpsRange(frame_settings, hal_metadata);
10679 if (rc != NO_ERROR) {
10680 LOGE("setHalFpsRange failed");
10681 }
10682 }
10683
10684 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10685 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10687 rc = BAD_VALUE;
10688 }
10689 }
10690
10691 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10692 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10693 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10694 fwk_effectMode);
10695 if (NAME_NOT_FOUND != val) {
10696 uint8_t effectMode = (uint8_t)val;
10697 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10698 rc = BAD_VALUE;
10699 }
10700 }
10701 }
10702
10703 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10704 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10706 colorCorrectMode)) {
10707 rc = BAD_VALUE;
10708 }
10709 }
10710
10711 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10712 cam_color_correct_gains_t colorCorrectGains;
10713 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10714 colorCorrectGains.gains[i] =
10715 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10716 }
10717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10718 colorCorrectGains)) {
10719 rc = BAD_VALUE;
10720 }
10721 }
10722
10723 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10724 cam_color_correct_matrix_t colorCorrectTransform;
10725 cam_rational_type_t transform_elem;
10726 size_t num = 0;
10727 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10728 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10729 transform_elem.numerator =
10730 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10731 transform_elem.denominator =
10732 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10733 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10734 num++;
10735 }
10736 }
10737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10738 colorCorrectTransform)) {
10739 rc = BAD_VALUE;
10740 }
10741 }
10742
10743 cam_trigger_t aecTrigger;
10744 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10745 aecTrigger.trigger_id = -1;
10746 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10747 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10748 aecTrigger.trigger =
10749 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10750 aecTrigger.trigger_id =
10751 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10753 aecTrigger)) {
10754 rc = BAD_VALUE;
10755 }
10756 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10757 aecTrigger.trigger, aecTrigger.trigger_id);
10758 }
10759
10760 /*af_trigger must come with a trigger id*/
10761 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10762 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10763 cam_trigger_t af_trigger;
10764 af_trigger.trigger =
10765 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10766 af_trigger.trigger_id =
10767 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10769 rc = BAD_VALUE;
10770 }
10771 LOGD("AfTrigger: %d AfTriggerID: %d",
10772 af_trigger.trigger, af_trigger.trigger_id);
10773 }
10774
10775 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10776 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10777 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10778 rc = BAD_VALUE;
10779 }
10780 }
10781 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10782 cam_edge_application_t edge_application;
10783 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10784 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10785 edge_application.sharpness = 0;
10786 } else {
10787 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10788 }
10789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10790 rc = BAD_VALUE;
10791 }
10792 }
10793
10794 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10795 int32_t respectFlashMode = 1;
10796 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10797 uint8_t fwk_aeMode =
10798 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10799 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10800 respectFlashMode = 0;
10801 LOGH("AE Mode controls flash, ignore android.flash.mode");
10802 }
10803 }
10804 if (respectFlashMode) {
10805 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10806 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10807 LOGH("flash mode after mapping %d", val);
10808 // To check: CAM_INTF_META_FLASH_MODE usage
10809 if (NAME_NOT_FOUND != val) {
10810 uint8_t flashMode = (uint8_t)val;
10811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10812 rc = BAD_VALUE;
10813 }
10814 }
10815 }
10816 }
10817
10818 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10819 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10821 rc = BAD_VALUE;
10822 }
10823 }
10824
10825 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10826 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10828 flashFiringTime)) {
10829 rc = BAD_VALUE;
10830 }
10831 }
10832
10833 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10834 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10835 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10836 hotPixelMode)) {
10837 rc = BAD_VALUE;
10838 }
10839 }
10840
10841 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10842 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10844 lensAperture)) {
10845 rc = BAD_VALUE;
10846 }
10847 }
10848
10849 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10850 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10852 filterDensity)) {
10853 rc = BAD_VALUE;
10854 }
10855 }
10856
10857 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10858 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10860 focalLength)) {
10861 rc = BAD_VALUE;
10862 }
10863 }
10864
10865 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10866 uint8_t optStabMode =
10867 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10869 optStabMode)) {
10870 rc = BAD_VALUE;
10871 }
10872 }
10873
10874 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
10875 uint8_t videoStabMode =
10876 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
10877 LOGD("videoStabMode from APP = %d", videoStabMode);
10878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
10879 videoStabMode)) {
10880 rc = BAD_VALUE;
10881 }
10882 }
10883
10884
10885 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
10886 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
10887 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
10888 noiseRedMode)) {
10889 rc = BAD_VALUE;
10890 }
10891 }
10892
10893 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
10894 float reprocessEffectiveExposureFactor =
10895 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
10896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
10897 reprocessEffectiveExposureFactor)) {
10898 rc = BAD_VALUE;
10899 }
10900 }
10901
10902 cam_crop_region_t scalerCropRegion;
10903 bool scalerCropSet = false;
10904 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
10905 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
10906 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
10907 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
10908 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
10909
10910 // Map coordinate system from active array to sensor output.
10911 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
10912 scalerCropRegion.width, scalerCropRegion.height);
10913
10914 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
10915 scalerCropRegion)) {
10916 rc = BAD_VALUE;
10917 }
10918 scalerCropSet = true;
10919 }
10920
10921 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
10922 int64_t sensorExpTime =
10923 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
10924 LOGD("setting sensorExpTime %lld", sensorExpTime);
10925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
10926 sensorExpTime)) {
10927 rc = BAD_VALUE;
10928 }
10929 }
10930
10931 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
10932 int64_t sensorFrameDuration =
10933 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
10934 int64_t minFrameDuration = getMinFrameDuration(request);
10935 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
10936 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
10937 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
10938 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
10939 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
10940 sensorFrameDuration)) {
10941 rc = BAD_VALUE;
10942 }
10943 }
10944
10945 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
10946 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
10947 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
10948 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
10949 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
10950 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
10951 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
10952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
10953 sensorSensitivity)) {
10954 rc = BAD_VALUE;
10955 }
10956 }
10957
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010958#ifndef USE_HAL_3_3
10959 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
10960 int32_t ispSensitivity =
10961 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
10962 if (ispSensitivity <
10963 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
10964 ispSensitivity =
10965 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10966 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10967 }
10968 if (ispSensitivity >
10969 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
10970 ispSensitivity =
10971 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
10972 LOGD("clamp ispSensitivity to %d", ispSensitivity);
10973 }
10974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
10975 ispSensitivity)) {
10976 rc = BAD_VALUE;
10977 }
10978 }
10979#endif
10980
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
10982 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
10983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
10984 rc = BAD_VALUE;
10985 }
10986 }
10987
10988 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
10989 uint8_t fwk_facedetectMode =
10990 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
10991
10992 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
10993 fwk_facedetectMode);
10994
10995 if (NAME_NOT_FOUND != val) {
10996 uint8_t facedetectMode = (uint8_t)val;
10997 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
10998 facedetectMode)) {
10999 rc = BAD_VALUE;
11000 }
11001 }
11002 }
11003
11004 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
11005 uint8_t histogramMode =
11006 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
11007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11008 histogramMode)) {
11009 rc = BAD_VALUE;
11010 }
11011 }
11012
11013 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11014 uint8_t sharpnessMapMode =
11015 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11016 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11017 sharpnessMapMode)) {
11018 rc = BAD_VALUE;
11019 }
11020 }
11021
11022 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11023 uint8_t tonemapMode =
11024 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11026 rc = BAD_VALUE;
11027 }
11028 }
11029 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11030 /*All tonemap channels will have the same number of points*/
11031 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11032 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11033 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11034 cam_rgb_tonemap_curves tonemapCurves;
11035 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11036 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11037 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11038 tonemapCurves.tonemap_points_cnt,
11039 CAM_MAX_TONEMAP_CURVE_SIZE);
11040 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11041 }
11042
11043 /* ch0 = G*/
11044 size_t point = 0;
11045 cam_tonemap_curve_t tonemapCurveGreen;
11046 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11047 for (size_t j = 0; j < 2; j++) {
11048 tonemapCurveGreen.tonemap_points[i][j] =
11049 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11050 point++;
11051 }
11052 }
11053 tonemapCurves.curves[0] = tonemapCurveGreen;
11054
11055 /* ch 1 = B */
11056 point = 0;
11057 cam_tonemap_curve_t tonemapCurveBlue;
11058 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11059 for (size_t j = 0; j < 2; j++) {
11060 tonemapCurveBlue.tonemap_points[i][j] =
11061 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11062 point++;
11063 }
11064 }
11065 tonemapCurves.curves[1] = tonemapCurveBlue;
11066
11067 /* ch 2 = R */
11068 point = 0;
11069 cam_tonemap_curve_t tonemapCurveRed;
11070 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11071 for (size_t j = 0; j < 2; j++) {
11072 tonemapCurveRed.tonemap_points[i][j] =
11073 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11074 point++;
11075 }
11076 }
11077 tonemapCurves.curves[2] = tonemapCurveRed;
11078
11079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11080 tonemapCurves)) {
11081 rc = BAD_VALUE;
11082 }
11083 }
11084
11085 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11086 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11088 captureIntent)) {
11089 rc = BAD_VALUE;
11090 }
11091 }
11092
11093 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11094 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11096 blackLevelLock)) {
11097 rc = BAD_VALUE;
11098 }
11099 }
11100
11101 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11102 uint8_t lensShadingMapMode =
11103 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11105 lensShadingMapMode)) {
11106 rc = BAD_VALUE;
11107 }
11108 }
11109
11110 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11111 cam_area_t roi;
11112 bool reset = true;
11113 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
11114
11115 // Map coordinate system from active array to sensor output.
11116 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11117 roi.rect.height);
11118
11119 if (scalerCropSet) {
11120 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11121 }
11122 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11123 rc = BAD_VALUE;
11124 }
11125 }
11126
11127 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11128 cam_area_t roi;
11129 bool reset = true;
11130 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
11131
11132 // Map coordinate system from active array to sensor output.
11133 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11134 roi.rect.height);
11135
11136 if (scalerCropSet) {
11137 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11138 }
11139 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11140 rc = BAD_VALUE;
11141 }
11142 }
11143
11144 // CDS for non-HFR non-video mode
11145 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11146 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11147 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11148 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11149 LOGE("Invalid CDS mode %d!", *fwk_cds);
11150 } else {
11151 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11152 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11153 rc = BAD_VALUE;
11154 }
11155 }
11156 }
11157
Thierry Strudel04e026f2016-10-10 11:27:36 -070011158 // Video HDR
11159 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
11160 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
11161 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11162 rc = setVideoHdrMode(mParameters, vhdr);
11163 if (rc != NO_ERROR) {
11164 LOGE("setVideoHDR is failed");
11165 }
11166 }
11167
11168 //IR
11169 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11170 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11171 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
11172 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11173 LOGE("Invalid IR mode %d!", fwk_ir);
11174 } else {
11175 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11176 CAM_INTF_META_IR_MODE, fwk_ir)) {
11177 rc = BAD_VALUE;
11178 }
11179 }
11180 }
11181
Thierry Strudel269c81a2016-10-12 12:13:59 -070011182 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11183 float aec_speed;
11184 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11185 LOGD("AEC Speed :%f", aec_speed);
11186 if ( aec_speed < 0 ) {
11187 LOGE("Invalid AEC mode %f!", aec_speed);
11188 } else {
11189 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11190 aec_speed)) {
11191 rc = BAD_VALUE;
11192 }
11193 }
11194 }
11195
11196 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
11197 float awb_speed;
11198 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
11199 LOGD("AWB Speed :%f", awb_speed);
11200 if ( awb_speed < 0 ) {
11201 LOGE("Invalid AWB mode %f!", awb_speed);
11202 } else {
11203 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
11204 awb_speed)) {
11205 rc = BAD_VALUE;
11206 }
11207 }
11208 }
11209
Thierry Strudel3d639192016-09-09 11:52:26 -070011210 // TNR
11211 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
11212 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
11213 uint8_t b_TnrRequested = 0;
11214 cam_denoise_param_t tnr;
11215 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
11216 tnr.process_plates =
11217 (cam_denoise_process_type_t)frame_settings.find(
11218 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
11219 b_TnrRequested = tnr.denoise_enable;
11220 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
11221 rc = BAD_VALUE;
11222 }
11223 }
11224
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011225 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
11226 int32_t* exposure_metering_mode =
11227 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
11228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
11229 *exposure_metering_mode)) {
11230 rc = BAD_VALUE;
11231 }
11232 }
11233
Thierry Strudel3d639192016-09-09 11:52:26 -070011234 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
11235 int32_t fwk_testPatternMode =
11236 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
11237 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
11238 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
11239
11240 if (NAME_NOT_FOUND != testPatternMode) {
11241 cam_test_pattern_data_t testPatternData;
11242 memset(&testPatternData, 0, sizeof(testPatternData));
11243 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
11244 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
11245 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
11246 int32_t *fwk_testPatternData =
11247 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11248 testPatternData.r = fwk_testPatternData[0];
11249 testPatternData.b = fwk_testPatternData[3];
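                // Note: the framework supplies ANDROID_SENSOR_TEST_PATTERN_DATA as [R, Geven, Godd, B];
                // the switch below maps the two green samples to gr/gb based on the sensor's CFA arrangement.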
11250 switch (gCamCapability[mCameraId]->color_arrangement) {
11251 case CAM_FILTER_ARRANGEMENT_RGGB:
11252 case CAM_FILTER_ARRANGEMENT_GRBG:
11253 testPatternData.gr = fwk_testPatternData[1];
11254 testPatternData.gb = fwk_testPatternData[2];
11255 break;
11256 case CAM_FILTER_ARRANGEMENT_GBRG:
11257 case CAM_FILTER_ARRANGEMENT_BGGR:
11258 testPatternData.gr = fwk_testPatternData[2];
11259 testPatternData.gb = fwk_testPatternData[1];
11260 break;
11261 default:
11262 LOGE("color arrangement %d is not supported",
11263 gCamCapability[mCameraId]->color_arrangement);
11264 break;
11265 }
11266 }
11267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11268 testPatternData)) {
11269 rc = BAD_VALUE;
11270 }
11271 } else {
11272 LOGE("Invalid framework sensor test pattern mode %d",
11273 fwk_testPatternMode);
11274 }
11275 }
11276
11277 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11278 size_t count = 0;
11279 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11280 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11281 gps_coords.data.d, gps_coords.count, count);
11282 if (gps_coords.count != count) {
11283 rc = BAD_VALUE;
11284 }
11285 }
11286
11287 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11288 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11289 size_t count = 0;
11290 const char *gps_methods_src = (const char *)
11291 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11292 memset(gps_methods, '\0', sizeof(gps_methods));
11293 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11294 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11295 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11296 if (GPS_PROCESSING_METHOD_SIZE != count) {
11297 rc = BAD_VALUE;
11298 }
11299 }
11300
11301 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11302 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11303 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11304 gps_timestamp)) {
11305 rc = BAD_VALUE;
11306 }
11307 }
11308
11309 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11310 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11311 cam_rotation_info_t rotation_info;
11312 if (orientation == 0) {
11313 rotation_info.rotation = ROTATE_0;
11314 } else if (orientation == 90) {
11315 rotation_info.rotation = ROTATE_90;
11316 } else if (orientation == 180) {
11317 rotation_info.rotation = ROTATE_180;
11318 } else if (orientation == 270) {
11319 rotation_info.rotation = ROTATE_270;
11320 } else {
 // Defensive default: ANDROID_JPEG_ORIENTATION is expected to be a multiple of 90,
 // so fall back to no rotation rather than leaving rotation_info.rotation uninitialized.
 rotation_info.rotation = ROTATE_0;
 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011321 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011322 rotation_info.streamId = snapshotStreamId;
11323 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11324 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11325 rc = BAD_VALUE;
11326 }
11327 }
11328
11329 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11330 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11331 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11332 rc = BAD_VALUE;
11333 }
11334 }
11335
11336 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11337 uint32_t thumb_quality = (uint32_t)
11338 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11340 thumb_quality)) {
11341 rc = BAD_VALUE;
11342 }
11343 }
11344
11345 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11346 cam_dimension_t dim;
11347 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11348 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11350 rc = BAD_VALUE;
11351 }
11352 }
11353
11354 // Internal metadata
11355 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11356 size_t count = 0;
11357 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11358 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11359 privatedata.data.i32, privatedata.count, count);
11360 if (privatedata.count != count) {
11361 rc = BAD_VALUE;
11362 }
11363 }
11364
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011365 // ISO/Exposure Priority
11366 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11367 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11368 cam_priority_mode_t mode =
11369 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11370 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11371 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11372 use_iso_exp_pty.previewOnly = FALSE;
11373 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11374 use_iso_exp_pty.value = *ptr;
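 // The same 64-bit payload carries either an ISO value or an exposure time, depending on
 // QCAMERA3_SELECT_PRIORITY; it is routed to the matching HAL parameter below.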
11375
11376 if(CAM_ISO_PRIORITY == mode) {
11377 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11378 use_iso_exp_pty)) {
11379 rc = BAD_VALUE;
11380 }
11381 }
11382 else {
11383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11384 use_iso_exp_pty)) {
11385 rc = BAD_VALUE;
11386 }
11387 }
11388 }
11389 }
11390
11391 // Saturation
11392 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11393 int32_t* use_saturation =
11394 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11396 rc = BAD_VALUE;
11397 }
11398 }
11399
Thierry Strudel3d639192016-09-09 11:52:26 -070011400 // EV step
11401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11402 gCamCapability[mCameraId]->exp_compensation_step)) {
11403 rc = BAD_VALUE;
11404 }
11405
11406 // CDS info
11407 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11408 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11409 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11410
11411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11412 CAM_INTF_META_CDS_DATA, *cdsData)) {
11413 rc = BAD_VALUE;
11414 }
11415 }
11416
Shuzhen Wang19463d72016-03-08 11:09:52 -080011417 // Hybrid AE
11418 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11419 uint8_t *hybrid_ae = (uint8_t *)
11420 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11421
11422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11423 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11424 rc = BAD_VALUE;
11425 }
11426 }
11427
Thierry Strudel3d639192016-09-09 11:52:26 -070011428 return rc;
11429}
11430
11431/*===========================================================================
11432 * FUNCTION : captureResultCb
11433 *
11434 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11435 *
11436 * PARAMETERS :
11437 * @metadata : metadata information from mm-camera-interface
11438 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11439 * @userdata: userdata
11440 *
11441 * RETURN : NONE
11442 *==========================================================================*/
11443void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11444 camera3_stream_buffer_t *buffer,
11445 uint32_t frame_number, bool isInputBuffer, void *userdata)
11446{
11447 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11448 if (hw == NULL) {
11449 LOGE("Invalid hw %p", hw);
11450 return;
11451 }
11452
11453 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11454 return;
11455}
11456
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011457/*===========================================================================
11458 * FUNCTION : setBufferErrorStatus
11459 *
11460 * DESCRIPTION: Callback handler for channels to report any buffer errors
11461 *
11462 * PARAMETERS :
11463 * @ch : Channel on which buffer error is reported from
11464 * @frame_number : frame number on which buffer error is reported on
11465 * @buffer_status : buffer error status
11466 * @userdata: userdata
11467 *
11468 * RETURN : NONE
11469 *==========================================================================*/
11470void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11471 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11472{
11473 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11474 if (hw == NULL) {
11475 LOGE("Invalid hw %p", hw);
11476 return;
11477 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011478
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011479 hw->setBufferErrorStatus(ch, frame_number, err);
11480 return;
11481}
11482
11483void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11484 uint32_t frameNumber, camera3_buffer_status_t err)
11485{
11486 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
11487 pthread_mutex_lock(&mMutex);
11488
11489 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
11490 if (req.frame_number != frameNumber)
11491 continue;
11492 for (auto& k : req.mPendingBufferList) {
11493 if(k.stream->priv == ch) {
11494 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
11495 }
11496 }
11497 }
11498
11499 pthread_mutex_unlock(&mMutex);
11500 return;
11501}
Thierry Strudel3d639192016-09-09 11:52:26 -070011502/*===========================================================================
11503 * FUNCTION : initialize
11504 *
11505 * DESCRIPTION: Pass framework callback pointers to HAL
11506 *
11507 * PARAMETERS :
11508 *
11509 *
11510 * RETURN : Success : 0
11511 * Failure: -ENODEV
11512 *==========================================================================*/
11513
11514int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11515 const camera3_callback_ops_t *callback_ops)
11516{
11517 LOGD("E");
11518 QCamera3HardwareInterface *hw =
11519 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11520 if (!hw) {
11521 LOGE("NULL camera device");
11522 return -ENODEV;
11523 }
11524
11525 int rc = hw->initialize(callback_ops);
11526 LOGD("X");
11527 return rc;
11528}
11529
11530/*===========================================================================
11531 * FUNCTION : configure_streams
11532 *
11533 * DESCRIPTION:
11534 *
11535 * PARAMETERS :
11536 *
11537 *
11538 * RETURN : Success: 0
11539 * Failure: -EINVAL (if stream configuration is invalid)
11540 * -ENODEV (fatal error)
11541 *==========================================================================*/
11542
11543int QCamera3HardwareInterface::configure_streams(
11544 const struct camera3_device *device,
11545 camera3_stream_configuration_t *stream_list)
11546{
11547 LOGD("E");
11548 QCamera3HardwareInterface *hw =
11549 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11550 if (!hw) {
11551 LOGE("NULL camera device");
11552 return -ENODEV;
11553 }
11554 int rc = hw->configureStreams(stream_list);
11555 LOGD("X");
11556 return rc;
11557}
11558
11559/*===========================================================================
11560 * FUNCTION : construct_default_request_settings
11561 *
11562 * DESCRIPTION: Configure a settings buffer to meet the required use case
11563 *
11564 * PARAMETERS :
11565 *
11566 *
11567 * RETURN : Success: Return valid metadata
11568 * Failure: Return NULL
11569 *==========================================================================*/
11570const camera_metadata_t* QCamera3HardwareInterface::
11571 construct_default_request_settings(const struct camera3_device *device,
11572 int type)
11573{
11574
11575 LOGD("E");
11576 camera_metadata_t* fwk_metadata = NULL;
11577 QCamera3HardwareInterface *hw =
11578 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11579 if (!hw) {
11580 LOGE("NULL camera device");
11581 return NULL;
11582 }
11583
11584 fwk_metadata = hw->translateCapabilityToMetadata(type);
11585
11586 LOGD("X");
11587 return fwk_metadata;
11588}
11589
11590/*===========================================================================
11591 * FUNCTION : process_capture_request
11592 *
11593 * DESCRIPTION:
11594 *
11595 * PARAMETERS :
11596 *
11597 *
11598 * RETURN :
11599 *==========================================================================*/
11600int QCamera3HardwareInterface::process_capture_request(
11601 const struct camera3_device *device,
11602 camera3_capture_request_t *request)
11603{
11604 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011605 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011606 QCamera3HardwareInterface *hw =
11607 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11608 if (!hw) {
11609 LOGE("NULL camera device");
11610 return -EINVAL;
11611 }
11612
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011613 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011614 LOGD("X");
11615 return rc;
11616}
11617
11618/*===========================================================================
11619 * FUNCTION : dump
11620 *
11621 * DESCRIPTION:
11622 *
11623 * PARAMETERS :
11624 *
11625 *
11626 * RETURN :
11627 *==========================================================================*/
11628
11629void QCamera3HardwareInterface::dump(
11630 const struct camera3_device *device, int fd)
11631{
11632 /* Log level property is read when "adb shell dumpsys media.camera" is
11633 called so that the log level can be controlled without restarting
11634 the media server */
11635 getLogLevel();
11636
11637 LOGD("E");
11638 QCamera3HardwareInterface *hw =
11639 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11640 if (!hw) {
11641 LOGE("NULL camera device");
11642 return;
11643 }
11644
11645 hw->dump(fd);
11646 LOGD("X");
11647 return;
11648}
11649
11650/*===========================================================================
11651 * FUNCTION : flush
11652 *
11653 * DESCRIPTION:
11654 *
11655 * PARAMETERS :
11656 *
11657 *
11658 * RETURN :
11659 *==========================================================================*/
11660
11661int QCamera3HardwareInterface::flush(
11662 const struct camera3_device *device)
11663{
11664 int rc;
11665 LOGD("E");
11666 QCamera3HardwareInterface *hw =
11667 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11668 if (!hw) {
11669 LOGE("NULL camera device");
11670 return -EINVAL;
11671 }
11672
11673 pthread_mutex_lock(&hw->mMutex);
11674 // Validate current state
11675 switch (hw->mState) {
11676 case STARTED:
11677 /* valid state */
11678 break;
11679
11680 case ERROR:
11681 pthread_mutex_unlock(&hw->mMutex);
11682 hw->handleCameraDeviceError();
11683 return -ENODEV;
11684
11685 default:
11686 LOGI("Flush returned during state %d", hw->mState);
11687 pthread_mutex_unlock(&hw->mMutex);
11688 return 0;
11689 }
11690 pthread_mutex_unlock(&hw->mMutex);
11691
11692 rc = hw->flush(true /* restart channels */ );
11693 LOGD("X");
11694 return rc;
11695}
11696
11697/*===========================================================================
11698 * FUNCTION : close_camera_device
11699 *
11700 * DESCRIPTION:
11701 *
11702 * PARAMETERS :
11703 *
11704 *
11705 * RETURN :
11706 *==========================================================================*/
11707int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11708{
11709 int ret = NO_ERROR;
11710 QCamera3HardwareInterface *hw =
11711 reinterpret_cast<QCamera3HardwareInterface *>(
11712 reinterpret_cast<camera3_device_t *>(device)->priv);
11713 if (!hw) {
11714 LOGE("NULL camera device");
11715 return BAD_VALUE;
11716 }
11717
11718 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11719 delete hw;
11720 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011721 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011722 return ret;
11723}
11724
11725/*===========================================================================
11726 * FUNCTION : getWaveletDenoiseProcessPlate
11727 *
11728 * DESCRIPTION: query wavelet denoise process plate
11729 *
11730 * PARAMETERS : None
11731 *
11732 * RETURN : WNR process plate value
11733 *==========================================================================*/
11734cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11735{
11736 char prop[PROPERTY_VALUE_MAX];
11737 memset(prop, 0, sizeof(prop));
11738 property_get("persist.denoise.process.plates", prop, "0");
11739 int processPlate = atoi(prop);
11740 switch(processPlate) {
11741 case 0:
11742 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11743 case 1:
11744 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11745 case 2:
11746 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11747 case 3:
11748 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11749 default:
11750 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11751 }
11752}
11753
11754
11755/*===========================================================================
11756 * FUNCTION : getTemporalDenoiseProcessPlate
11757 *
11758 * DESCRIPTION: query temporal denoise process plate
11759 *
11760 * PARAMETERS : None
11761 *
11762 * RETURN : TNR process plate value
11763 *==========================================================================*/
11764cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11765{
11766 char prop[PROPERTY_VALUE_MAX];
11767 memset(prop, 0, sizeof(prop));
11768 property_get("persist.tnr.process.plates", prop, "0");
11769 int processPlate = atoi(prop);
11770 switch(processPlate) {
11771 case 0:
11772 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11773 case 1:
11774 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11775 case 2:
11776 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11777 case 3:
11778 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11779 default:
11780 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11781 }
11782}
11783
11784
11785/*===========================================================================
11786 * FUNCTION : extractSceneMode
11787 *
11788 * DESCRIPTION: Extract scene mode from frameworks set metadata
11789 *
11790 * PARAMETERS :
11791 * @frame_settings: CameraMetadata reference
11792 * @metaMode: ANDROID_CONTROL_MODE
11793 * @hal_metadata: hal metadata structure
11794 *
11795 * RETURN : NO_ERROR on success, BAD_VALUE on failure
11796 *==========================================================================*/
11797int32_t QCamera3HardwareInterface::extractSceneMode(
11798 const CameraMetadata &frame_settings, uint8_t metaMode,
11799 metadata_buffer_t *hal_metadata)
11800{
11801 int32_t rc = NO_ERROR;
11802
11803 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11804 camera_metadata_ro_entry entry =
11805 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11806 if (0 == entry.count)
11807 return rc;
11808
11809 uint8_t fwk_sceneMode = entry.data.u8[0];
11810
11811 int val = lookupHalName(SCENE_MODES_MAP,
11812 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11813 fwk_sceneMode);
11814 if (NAME_NOT_FOUND != val) {
11815 uint8_t sceneMode = (uint8_t)val;
11816 LOGD("sceneMode: %d", sceneMode);
11817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11818 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11819 rc = BAD_VALUE;
11820 }
11821 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011822
11823 if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
11824 cam_hdr_param_t hdr_params;
11825 hdr_params.hdr_enable = 1;
11826 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11827 hdr_params.hdr_need_1x = false;
11828 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11829 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11830 rc = BAD_VALUE;
11831 }
11832 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011833 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
11834 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
11835 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11836 LOGD("sceneMode: %d", sceneMode);
11837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11838 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11839 rc = BAD_VALUE;
11840 }
11841 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011842
11843 if (mForceHdrSnapshot) {
11844 cam_hdr_param_t hdr_params;
11845 hdr_params.hdr_enable = 1;
11846 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11847 hdr_params.hdr_need_1x = false;
11848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11849 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11850 rc = BAD_VALUE;
11851 }
11852 }
11853
Thierry Strudel3d639192016-09-09 11:52:26 -070011854 return rc;
11855}
11856
11857/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011858 * FUNCTION : setVideoHdrMode
11859 *
11860 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11861 *
11862 * PARAMETERS :
11863 * @hal_metadata: hal metadata structure
11864 * @vhdr: video HDR mode requested through QCAMERA3_VIDEO_HDR_MODE
11865 *
11866 * RETURN : NO_ERROR on success, BAD_VALUE on failure
11867 *==========================================================================*/
11868int32_t QCamera3HardwareInterface::setVideoHdrMode(
11869 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
11870{
11871 int32_t rc = NO_ERROR;
11872 if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
11873 LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
11874 rc = BAD_VALUE;
11875 } else {
11876 cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
11877 if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
11878 LOGD("Setting HDR mode Off");
11879 vhdr_type = CAM_SENSOR_HDR_OFF;
11880 } else {
11881 char video_hdr_prop[PROPERTY_VALUE_MAX];
11882 memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
11883 property_get("persist.camera.hdr.video", video_hdr_prop, "3");
11884 uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
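 // persist.camera.hdr.video selects which sensor HDR flavor to request; it only takes
 // effect if the capability mask checked below advertises that mode.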
11885 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11886 CAM_QCOM_FEATURE_SENSOR_HDR) &&
11887 (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
11888 LOGD("Setting HDR mode In Sensor");
11889 vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
11890 }
11891 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11892 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
11893 (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
11894 LOGD("Setting HDR mode Zigzag");
11895 vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
11896 }
11897 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
11898 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
11899 (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
11900 LOGD("Setting HDR mode Staggered");
11901 vhdr_type = CAM_SENSOR_HDR_STAGGERED;
11902 }
11903 if(vhdr_type == CAM_SENSOR_HDR_MAX) {
11904 LOGD("HDR mode not supported");
11905 rc = BAD_VALUE;
11906 }
11907 }
11908 if(rc == NO_ERROR) {
11909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11910 CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
11911 rc = BAD_VALUE;
11912 }
11913 }
11914 }
11915 return rc;
11916}
11917
11918/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011919 * FUNCTION : needRotationReprocess
11920 *
11921 * DESCRIPTION: if rotation needs to be done by reprocess in pp
11922 *
11923 * PARAMETERS : none
11924 *
11925 * RETURN : true: needed
11926 * false: no need
11927 *==========================================================================*/
11928bool QCamera3HardwareInterface::needRotationReprocess()
11929{
11930 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
11931 // current rotation is not zero, and pp has the capability to process rotation
11932 LOGH("need do reprocess for rotation");
11933 return true;
11934 }
11935
11936 return false;
11937}
11938
11939/*===========================================================================
11940 * FUNCTION : needReprocess
11941 *
11942 * DESCRIPTION: if reprocess is needed
11943 *
11944 * PARAMETERS : @postprocess_mask - feature mask already applied to the frame
11945 *
11946 * RETURN : true: needed
11947 * false: no need
11948 *==========================================================================*/
11949bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
11950{
11951 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
11952 // TODO: add for ZSL HDR later
11953 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
11954 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
11955 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
11956 return true;
11957 } else {
11958 LOGH("already post processed frame");
11959 return false;
11960 }
11961 }
11962 return needRotationReprocess();
11963}
11964
11965/*===========================================================================
11966 * FUNCTION : needJpegExifRotation
11967 *
11968 * DESCRIPTION: if rotation needs to be applied via JPEG EXIF (when pp cannot rotate)
11969 *
11970 * PARAMETERS : none
11971 *
11972 * RETURN : true: needed
11973 * false: no need
11974 *==========================================================================*/
11975bool QCamera3HardwareInterface::needJpegExifRotation()
11976{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011977 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070011978 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
11979 LOGD("Need use Jpeg EXIF Rotation");
11980 return true;
11981 }
11982 return false;
11983}
11984
11985/*===========================================================================
11986 * FUNCTION : addOfflineReprocChannel
11987 *
11988 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
11989 * coming from input channel
11990 *
11991 * PARAMETERS :
11992 * @config : reprocess configuration
11993 * @inputChHandle : pointer to the input (source) channel
11994 *
11995 *
11996 * RETURN : Ptr to the newly created channel obj. NULL if failed.
11997 *==========================================================================*/
11998QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
11999 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12000{
12001 int32_t rc = NO_ERROR;
12002 QCamera3ReprocessChannel *pChannel = NULL;
12003
12004 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012005 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12006 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012007 if (NULL == pChannel) {
12008 LOGE("no mem for reprocess channel");
12009 return NULL;
12010 }
12011
12012 rc = pChannel->initialize(IS_TYPE_NONE);
12013 if (rc != NO_ERROR) {
12014 LOGE("init reprocess channel failed, ret = %d", rc);
12015 delete pChannel;
12016 return NULL;
12017 }
12018
12019 // pp feature config
12020 cam_pp_feature_config_t pp_config;
12021 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12022
12023 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12024 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12025 & CAM_QCOM_FEATURE_DSDN) {
12026 // Use CPP DSDN in place of CDS when the h/w supports it.
12027 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12028 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12029 }
12030 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12031 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12032 }
12033
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012034 if (config.hdr_param.hdr_enable) {
12035 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12036 pp_config.hdr_param = config.hdr_param;
12037 }
12038
12039 if (mForceHdrSnapshot) {
12040 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12041 pp_config.hdr_param.hdr_enable = 1;
12042 pp_config.hdr_param.hdr_need_1x = 0;
12043 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12044 }
12045
Thierry Strudel3d639192016-09-09 11:52:26 -070012046 rc = pChannel->addReprocStreamsFromSource(pp_config,
12047 config,
12048 IS_TYPE_NONE,
12049 mMetadataChannel);
12050
12051 if (rc != NO_ERROR) {
12052 delete pChannel;
12053 return NULL;
12054 }
12055 return pChannel;
12056}
12057
12058/*===========================================================================
12059 * FUNCTION : getMobicatMask
12060 *
12061 * DESCRIPTION: returns mobicat mask
12062 *
12063 * PARAMETERS : none
12064 *
12065 * RETURN : mobicat mask
12066 *
12067 *==========================================================================*/
12068uint8_t QCamera3HardwareInterface::getMobicatMask()
12069{
12070 return m_MobicatMask;
12071}
12072
12073/*===========================================================================
12074 * FUNCTION : setMobicat
12075 *
12076 * DESCRIPTION: set Mobicat on/off.
12077 *
12078 * PARAMETERS :
12079 * @params : none
12080 *
12081 * RETURN : int32_t type of status
12082 * NO_ERROR -- success
12083 * non-zero failure code
12084 *==========================================================================*/
12085int32_t QCamera3HardwareInterface::setMobicat()
12086{
12087 char value [PROPERTY_VALUE_MAX];
12088 property_get("persist.camera.mobicat", value, "0");
12089 int32_t ret = NO_ERROR;
12090 uint8_t enableMobi = (uint8_t)atoi(value);
12091
12092 if (enableMobi) {
12093 tune_cmd_t tune_cmd;
12094 tune_cmd.type = SET_RELOAD_CHROMATIX;
12095 tune_cmd.module = MODULE_ALL;
12096 tune_cmd.value = TRUE;
12097 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12098 CAM_INTF_PARM_SET_VFE_COMMAND,
12099 tune_cmd);
12100
12101 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12102 CAM_INTF_PARM_SET_PP_COMMAND,
12103 tune_cmd);
12104 }
12105 m_MobicatMask = enableMobi;
12106
12107 return ret;
12108}
12109
12110/*===========================================================================
12111* FUNCTION : getLogLevel
12112*
12113* DESCRIPTION: Reads the log level property into a variable
12114*
12115* PARAMETERS :
12116* None
12117*
12118* RETURN :
12119* None
12120*==========================================================================*/
12121void QCamera3HardwareInterface::getLogLevel()
12122{
12123 char prop[PROPERTY_VALUE_MAX];
12124 uint32_t globalLogLevel = 0;
12125
12126 property_get("persist.camera.hal.debug", prop, "0");
12127 int val = atoi(prop);
12128 if (0 <= val) {
12129 gCamHal3LogLevel = (uint32_t)val;
12130 }
12131
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012132 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012133 gKpiDebugLevel = atoi(prop);
12134
12135 property_get("persist.camera.global.debug", prop, "0");
12136 val = atoi(prop);
12137 if (0 <= val) {
12138 globalLogLevel = (uint32_t)val;
12139 }
12140
12141 /* Highest log level among hal.logs and global.logs is selected */
12142 if (gCamHal3LogLevel < globalLogLevel)
12143 gCamHal3LogLevel = globalLogLevel;
12144
12145 return;
12146}
12147
12148/*===========================================================================
12149 * FUNCTION : validateStreamRotations
12150 *
12151 * DESCRIPTION: Check if the rotations requested are supported
12152 *
12153 * PARAMETERS :
12154 * @stream_list : streams to be configured
12155 *
12156 * RETURN : NO_ERROR on success
12157 * -EINVAL on failure
12158 *
12159 *==========================================================================*/
12160int QCamera3HardwareInterface::validateStreamRotations(
12161 camera3_stream_configuration_t *streamList)
12162{
12163 int rc = NO_ERROR;
12164
12165 /*
12166 * Loop through all streams requested in configuration
12167 * Check if unsupported rotations have been requested on any of them
12168 */
12169 for (size_t j = 0; j < streamList->num_streams; j++){
12170 camera3_stream_t *newStream = streamList->streams[j];
12171
12172 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12173 bool isImplDef = (newStream->format ==
12174 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12175 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12176 isImplDef);
12177
12178 if (isRotated && (!isImplDef || isZsl)) {
12179 LOGE("Error: Unsupported rotation of %d requested for stream"
12180 "type:%d and stream format:%d",
12181 newStream->rotation, newStream->stream_type,
12182 newStream->format);
12183 rc = -EINVAL;
12184 break;
12185 }
12186 }
12187
12188 return rc;
12189}
12190
12191/*===========================================================================
12192* FUNCTION : getFlashInfo
12193*
12194* DESCRIPTION: Retrieve information about whether the device has a flash.
12195*
12196* PARAMETERS :
12197* @cameraId : Camera id to query
12198* @hasFlash : Boolean indicating whether there is a flash device
12199* associated with given camera
12200* @flashNode : If a flash device exists, this will be its device node.
12201*
12202* RETURN :
12203* None
12204*==========================================================================*/
12205void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12206 bool& hasFlash,
12207 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12208{
12209 cam_capability_t* camCapability = gCamCapability[cameraId];
12210 if (NULL == camCapability) {
12211 hasFlash = false;
12212 flashNode[0] = '\0';
12213 } else {
12214 hasFlash = camCapability->flash_available;
12215 strlcpy(flashNode,
12216 (char*)camCapability->flash_dev_name,
12217 QCAMERA_MAX_FILEPATH_LENGTH);
12218 }
12219}
12220
12221/*===========================================================================
12222* FUNCTION : getEepromVersionInfo
12223*
12224* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12225*
12226* PARAMETERS : None
12227*
12228* RETURN : string describing EEPROM version
12229* "\0" if no such info available
12230*==========================================================================*/
12231const char *QCamera3HardwareInterface::getEepromVersionInfo()
12232{
12233 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
12234}
12235
12236/*===========================================================================
12237* FUNCTION : getLdafCalib
12238*
12239* DESCRIPTION: Retrieve Laser AF calibration data
12240*
12241* PARAMETERS : None
12242*
12243* RETURN : Two uint32_t describing laser AF calibration data
12244* NULL if none is available.
12245*==========================================================================*/
12246const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12247{
12248 if (mLdafCalibExist) {
12249 return &mLdafCalib[0];
12250 } else {
12251 return NULL;
12252 }
12253}
12254
12255/*===========================================================================
12256 * FUNCTION : dynamicUpdateMetaStreamInfo
12257 *
12258 * DESCRIPTION: This function:
12259 * (1) stops all the channels
12260 * (2) returns error on pending requests and buffers
12261 * (3) sends metastream_info in setparams
12262 * (4) starts all channels
12263 * This is useful when sensor has to be restarted to apply any
12264 * settings such as frame rate from a different sensor mode
12265 *
12266 * PARAMETERS : None
12267 *
12268 * RETURN : NO_ERROR on success
12269 * Error codes on failure
12270 *
12271 *==========================================================================*/
12272int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12273{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012274 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012275 int rc = NO_ERROR;
12276
12277 LOGD("E");
12278
12279 rc = stopAllChannels();
12280 if (rc < 0) {
12281 LOGE("stopAllChannels failed");
12282 return rc;
12283 }
12284
12285 rc = notifyErrorForPendingRequests();
12286 if (rc < 0) {
12287 LOGE("notifyErrorForPendingRequests failed");
12288 return rc;
12289 }
12290
12291 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12292 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12293 "Format:%d",
12294 mStreamConfigInfo.type[i],
12295 mStreamConfigInfo.stream_sizes[i].width,
12296 mStreamConfigInfo.stream_sizes[i].height,
12297 mStreamConfigInfo.postprocess_mask[i],
12298 mStreamConfigInfo.format[i]);
12299 }
12300
12301 /* Send meta stream info once again so that ISP can start */
12302 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12303 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12304 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12305 mParameters);
12306 if (rc < 0) {
12307 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12308 }
12309
12310 rc = startAllChannels();
12311 if (rc < 0) {
12312 LOGE("startAllChannels failed");
12313 return rc;
12314 }
12315
12316 LOGD("X");
12317 return rc;
12318}
12319
12320/*===========================================================================
12321 * FUNCTION : stopAllChannels
12322 *
12323 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12324 *
12325 * PARAMETERS : None
12326 *
12327 * RETURN : NO_ERROR on success
12328 * Error codes on failure
12329 *
12330 *==========================================================================*/
12331int32_t QCamera3HardwareInterface::stopAllChannels()
12332{
12333 int32_t rc = NO_ERROR;
12334
12335 LOGD("Stopping all channels");
12336 // Stop the Streams/Channels
12337 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12338 it != mStreamInfo.end(); it++) {
12339 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12340 if (channel) {
12341 channel->stop();
12342 }
12343 (*it)->status = INVALID;
12344 }
12345
12346 if (mSupportChannel) {
12347 mSupportChannel->stop();
12348 }
12349 if (mAnalysisChannel) {
12350 mAnalysisChannel->stop();
12351 }
12352 if (mRawDumpChannel) {
12353 mRawDumpChannel->stop();
12354 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012355 if (mHdrPlusRawSrcChannel) {
12356 mHdrPlusRawSrcChannel->stop();
12357 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012358 if (mMetadataChannel) {
12359 /* If content of mStreamInfo is not 0, there is metadata stream */
12360 mMetadataChannel->stop();
12361 }
12362
12363 LOGD("All channels stopped");
12364 return rc;
12365}
12366
12367/*===========================================================================
12368 * FUNCTION : startAllChannels
12369 *
12370 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12371 *
12372 * PARAMETERS : None
12373 *
12374 * RETURN : NO_ERROR on success
12375 * Error codes on failure
12376 *
12377 *==========================================================================*/
12378int32_t QCamera3HardwareInterface::startAllChannels()
12379{
12380 int32_t rc = NO_ERROR;
12381
12382 LOGD("Start all channels ");
12383 // Start the Streams/Channels
12384 if (mMetadataChannel) {
12385 /* If content of mStreamInfo is not 0, there is metadata stream */
12386 rc = mMetadataChannel->start();
12387 if (rc < 0) {
12388 LOGE("META channel start failed");
12389 return rc;
12390 }
12391 }
12392 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12393 it != mStreamInfo.end(); it++) {
12394 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12395 if (channel) {
12396 rc = channel->start();
12397 if (rc < 0) {
12398 LOGE("channel start failed");
12399 return rc;
12400 }
12401 }
12402 }
12403 if (mAnalysisChannel) {
12404 mAnalysisChannel->start();
12405 }
12406 if (mSupportChannel) {
12407 rc = mSupportChannel->start();
12408 if (rc < 0) {
12409 LOGE("Support channel start failed");
12410 return rc;
12411 }
12412 }
12413 if (mRawDumpChannel) {
12414 rc = mRawDumpChannel->start();
12415 if (rc < 0) {
12416 LOGE("RAW dump channel start failed");
12417 return rc;
12418 }
12419 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012420 if (mHdrPlusRawSrcChannel) {
12421 rc = mHdrPlusRawSrcChannel->start();
12422 if (rc < 0) {
12423 LOGE("HDR+ RAW channel start failed");
12424 return rc;
12425 }
12426 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012427
12428 LOGD("All channels started");
12429 return rc;
12430}
12431
12432/*===========================================================================
12433 * FUNCTION : notifyErrorForPendingRequests
12434 *
12435 * DESCRIPTION: This function sends error for all the pending requests/buffers
12436 *
12437 * PARAMETERS : None
12438 *
12439 * RETURN : Error codes
12440 * NO_ERROR on success
12441 *
12442 *==========================================================================*/
12443int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
12444{
12445 int32_t rc = NO_ERROR;
12446 unsigned int frameNum = 0;
12447 camera3_capture_result_t result;
12448 camera3_stream_buffer_t *pStream_Buf = NULL;
12449
12450 memset(&result, 0, sizeof(camera3_capture_result_t));
12451
12452 if (mPendingRequestsList.size() > 0) {
12453 pendingRequestIterator i = mPendingRequestsList.begin();
12454 frameNum = i->frame_number;
12455 } else {
12456 /* There might still be pending buffers even though there are
12457 no pending requests. Setting the frameNum to MAX so that
12458 all the buffers with smaller frame numbers are returned */
12459 frameNum = UINT_MAX;
12460 }
12461
12462 LOGH("Oldest frame num on mPendingRequestsList = %u",
12463 frameNum);
12464
12465 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
12466 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
12467
12468 if (req->frame_number < frameNum) {
12469 // Send Error notify to frameworks for each buffer for which
12470 // metadata buffer is already sent
12471 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
12472 req->frame_number, req->mPendingBufferList.size());
12473
12474 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12475 if (NULL == pStream_Buf) {
12476 LOGE("No memory for pending buffers array");
12477 return NO_MEMORY;
12478 }
12479 memset(pStream_Buf, 0,
12480 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12481 result.result = NULL;
12482 result.frame_number = req->frame_number;
12483 result.num_output_buffers = req->mPendingBufferList.size();
12484 result.output_buffers = pStream_Buf;
12485
12486 size_t index = 0;
12487 for (auto info = req->mPendingBufferList.begin();
12488 info != req->mPendingBufferList.end(); ) {
12489
12490 camera3_notify_msg_t notify_msg;
12491 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12492 notify_msg.type = CAMERA3_MSG_ERROR;
12493 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
12494 notify_msg.message.error.error_stream = info->stream;
12495 notify_msg.message.error.frame_number = req->frame_number;
12496 pStream_Buf[index].acquire_fence = -1;
12497 pStream_Buf[index].release_fence = -1;
12498 pStream_Buf[index].buffer = info->buffer;
12499 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12500 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012501 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012502 index++;
12503 // Remove buffer from list
12504 info = req->mPendingBufferList.erase(info);
12505 }
12506
12507 // Remove this request from Map
12508 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12509 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12510 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12511
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012512 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012513
12514 delete [] pStream_Buf;
12515 } else {
12516
12517 // Go through the pending requests info and send error request to framework
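 // CAMERA3_MSG_ERROR_REQUEST tells the framework that no result metadata will follow for
 // this frame, so all of its buffers are returned below with CAMERA3_BUFFER_STATUS_ERROR.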
12518 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
12519
12520 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
12521
12522 // Send error notify to frameworks
12523 camera3_notify_msg_t notify_msg;
12524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
12525 notify_msg.type = CAMERA3_MSG_ERROR;
12526 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
12527 notify_msg.message.error.error_stream = NULL;
12528 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012529 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070012530
12531 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
12532 if (NULL == pStream_Buf) {
12533 LOGE("No memory for pending buffers array");
12534 return NO_MEMORY;
12535 }
12536 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
12537
12538 result.result = NULL;
12539 result.frame_number = req->frame_number;
12540 result.input_buffer = i->input_buffer;
12541 result.num_output_buffers = req->mPendingBufferList.size();
12542 result.output_buffers = pStream_Buf;
12543
12544 size_t index = 0;
12545 for (auto info = req->mPendingBufferList.begin();
12546 info != req->mPendingBufferList.end(); ) {
12547 pStream_Buf[index].acquire_fence = -1;
12548 pStream_Buf[index].release_fence = -1;
12549 pStream_Buf[index].buffer = info->buffer;
12550 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
12551 pStream_Buf[index].stream = info->stream;
12552 index++;
12553 // Remove buffer from list
12554 info = req->mPendingBufferList.erase(info);
12555 }
12556
12557 // Remove this request from Map
12558 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
12559 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
12560 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
12561
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012562 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070012563 delete [] pStream_Buf;
12564 i = erasePendingRequest(i);
12565 }
12566 }
12567
12568 /* Reset pending frame Drop list and requests list */
12569 mPendingFrameDropList.clear();
12570
12571 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
12572 req.mPendingBufferList.clear();
12573 }
12574 mPendingBuffersMap.mPendingBuffersInRequest.clear();
12575 mPendingReprocessResultList.clear();
12576 LOGH("Cleared all the pending buffers ");
12577
12578 return rc;
12579}
12580
12581bool QCamera3HardwareInterface::isOnEncoder(
12582 const cam_dimension_t max_viewfinder_size,
12583 uint32_t width, uint32_t height)
12584{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012585 return ((width > (uint32_t)max_viewfinder_size.width) ||
12586 (height > (uint32_t)max_viewfinder_size.height) ||
12587 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12588 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012589}
12590
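/*
 * Illustrative usage sketch (not part of the original code; the viewfinder
 * size below is an assumed example, the real value comes from the camera
 * capability table at runtime):
 *
 *   cam_dimension_t maxVf;
 *   maxVf.width  = 1920;
 *   maxVf.height = 1080;
 *   // 4K exceeds the viewfinder limit, so it must use the encoder path
 *   bool needsEncoder = isOnEncoder(maxVf, 3840, 2160);   // true
 *   bool fitsPreview  = !isOnEncoder(maxVf, 1280, 720);   // true, fits viewfinder
 */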
12591/*===========================================================================
12592 * FUNCTION : setBundleInfo
12593 *
12594 * DESCRIPTION: Set bundle info for all streams that are bundled.
12595 *
12596 * PARAMETERS : None
12597 *
12598 * RETURN : NO_ERROR on success
12599 * Error codes on failure
12600 *==========================================================================*/
12601int32_t QCamera3HardwareInterface::setBundleInfo()
12602{
12603 int32_t rc = NO_ERROR;
12604
12605 if (mChannelHandle) {
12606 cam_bundle_config_t bundleInfo;
12607 memset(&bundleInfo, 0, sizeof(bundleInfo));
12608 rc = mCameraHandle->ops->get_bundle_info(
12609 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12610 if (rc != NO_ERROR) {
12611 LOGE("get_bundle_info failed");
12612 return rc;
12613 }
12614 if (mAnalysisChannel) {
12615 mAnalysisChannel->setBundleInfo(bundleInfo);
12616 }
12617 if (mSupportChannel) {
12618 mSupportChannel->setBundleInfo(bundleInfo);
12619 }
12620 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12621 it != mStreamInfo.end(); it++) {
12622 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12623 channel->setBundleInfo(bundleInfo);
12624 }
12625 if (mRawDumpChannel) {
12626 mRawDumpChannel->setBundleInfo(bundleInfo);
12627 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012628 if (mHdrPlusRawSrcChannel) {
12629 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
12630 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012631 }
12632
12633 return rc;
12634}
12635
12636/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012637 * FUNCTION : setInstantAEC
12638 *
12639 * DESCRIPTION: Set instant AEC related parameters, from framework metadata if present, otherwise from a system property.
12640 *
12641 * PARAMETERS :
12642 * @meta: CameraMetadata reference
12643 *
12644 * RETURN : NO_ERROR on success
12645 * Error codes on failure
12646 *==========================================================================*/
12647int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12648{
12649 int32_t rc = NO_ERROR;
12650 uint8_t val = 0;
12651 char prop[PROPERTY_VALUE_MAX];
12652
12653 // First try to configure instant AEC from framework metadata
12654 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12655 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12656 }
12657
12658 // If the framework did not set this value, try to read it from the system property.
12659 if (val == 0) {
12660 memset(prop, 0, sizeof(prop));
12661 property_get("persist.camera.instant.aec", prop, "0");
12662 val = (uint8_t)atoi(prop);
12663 }
12664
12665 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12666 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12667 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12668 mInstantAEC = val;
12669 mInstantAECSettledFrameNumber = 0;
12670 mInstantAecFrameIdxCount = 0;
12671 LOGH("instantAEC value set %d",val);
12672 if (mInstantAEC) {
12673 memset(prop, 0, sizeof(prop));
12674 property_get("persist.camera.ae.instant.bound", prop, "10");
12675 int32_t aec_frame_skip_cnt = atoi(prop);
12676 if (aec_frame_skip_cnt >= 0) {
12677 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12678 } else {
12679 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12680 rc = BAD_VALUE;
12681 }
12682 }
12683 } else {
12684 LOGE("Bad instant aec value set %d", val);
12685 rc = BAD_VALUE;
12686 }
12687 return rc;
12688}
12689
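/*
 * Illustrative usage sketch (not part of the original code; the value 1 is
 * an assumed example that must fall inside the convergence range checked
 * above): the framework can request instant AEC through the vendor tag,
 * otherwise the persist.camera.instant.aec property read above is used.
 *
 *   CameraMetadata settings;
 *   int32_t instantAecMode = 1;
 *   settings.update(QCAMERA3_INSTANT_AEC_MODE, &instantAecMode, 1);
 *   // setInstantAEC(settings) then programs CAM_INTF_PARM_INSTANT_AEC
 *   // and resets the instant AEC bookkeeping counters.
 */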
12690/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012691 * FUNCTION : get_num_overall_buffers
12692 *
12693 * DESCRIPTION: Return the total number of pending buffers across all requests.
12694 *
12695 * PARAMETERS : None
12696 *
12697 * RETURN : Number of overall pending buffers
12698 *
12699 *==========================================================================*/
12700uint32_t PendingBuffersMap::get_num_overall_buffers()
12701{
12702 uint32_t sum_buffers = 0;
12703 for (auto &req : mPendingBuffersInRequest) {
12704 sum_buffers += req.mPendingBufferList.size();
12705 }
12706 return sum_buffers;
12707}
12708
12709/*===========================================================================
12710 * FUNCTION : removeBuf
12711 *
12712 * DESCRIPTION: Remove a matching buffer from the pending-buffers tracker.
12713 *
12714 * PARAMETERS : @buffer: image buffer for the callback
12715 *
12716 * RETURN : None
12717 *
12718 *==========================================================================*/
12719void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
12720{
12721 bool buffer_found = false;
12722 for (auto req = mPendingBuffersInRequest.begin();
12723 req != mPendingBuffersInRequest.end(); req++) {
12724 for (auto k = req->mPendingBufferList.begin();
12725 k != req->mPendingBufferList.end(); k++ ) {
12726 if (k->buffer == buffer) {
12727 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
12728 req->frame_number, buffer);
12729 k = req->mPendingBufferList.erase(k);
12730 if (req->mPendingBufferList.empty()) {
12731 // Remove this request from Map
12732 req = mPendingBuffersInRequest.erase(req);
12733 }
12734 buffer_found = true;
12735 break;
12736 }
12737 }
12738 if (buffer_found) {
12739 break;
12740 }
12741 }
12742 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
12743 get_num_overall_buffers());
12744}
12745
12746/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012747 * FUNCTION : getBufErrStatus
12748 *
12749 * DESCRIPTION: Get the error status tracked for the given buffer
12750 *
12751 * PARAMETERS : @buffer: buffer handle
12752 *
12753 * RETURN : Error status
12754 *
12755 *==========================================================================*/
12756int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12757{
12758 for (auto& req : mPendingBuffersInRequest) {
12759 for (auto& k : req.mPendingBufferList) {
12760 if (k.buffer == buffer)
12761 return k.bufStatus;
12762 }
12763 }
12764 return CAMERA3_BUFFER_STATUS_OK;
12765}
12766
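/*
 * Illustrative usage sketch (assumed call site, not part of the original
 * code): a result path can propagate the tracked status so that buffers
 * flagged with an error are reported to the framework as such.
 *
 *   camera3_stream_buffer_t outBuf;   // buffer about to be returned
 *   if (mPendingBuffersMap.getBufErrStatus(outBuf.buffer) ==
 *           CAMERA3_BUFFER_STATUS_ERROR) {
 *       outBuf.status = CAMERA3_BUFFER_STATUS_ERROR;
 *   }
 */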
12767/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012768 * FUNCTION : setPAAFSupport
12769 *
12770 * DESCRIPTION: Set the preview-assisted auto focus (PAAF) support bit in
12771 *              the feature mask according to the stream type and color
12772 *              filter arrangement
12773 *
12774 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12775 * @stream_type: stream type
12776 * @filter_arrangement: filter arrangement
12777 *
12778 * RETURN : None
12779 *==========================================================================*/
12780void QCamera3HardwareInterface::setPAAFSupport(
12781 cam_feature_mask_t& feature_mask,
12782 cam_stream_type_t stream_type,
12783 cam_color_filter_arrangement_t filter_arrangement)
12784{
12785 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12786 feature_mask, stream_type, filter_arrangement);
12787
12788 switch (filter_arrangement) {
12789 case CAM_FILTER_ARRANGEMENT_RGGB:
12790 case CAM_FILTER_ARRANGEMENT_GRBG:
12791 case CAM_FILTER_ARRANGEMENT_GBRG:
12792 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012793 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12794 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012795 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12796 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12797 }
12798 break;
12799 case CAM_FILTER_ARRANGEMENT_Y:
12800 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12801 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12802 }
12803 break;
12804 default:
12805 break;
12806 }
12807}
12808
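/*
 * Illustrative example (derived from the switch above): on a Bayer sensor
 * the PAAF bit is added for preview, analysis and video streams, while on
 * a mono (Y-only) sensor only the analysis stream gets it.
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now contains CAM_QCOM_FEATURE_PAAF
 *   cam_feature_mask_t monoMask = 0;
 *   setPAAFSupport(monoMask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_Y);
 *   // monoMask is unchanged; only CAM_STREAM_TYPE_ANALYSIS would set it
 */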
12809/*===========================================================================
12810* FUNCTION : getSensorMountAngle
12811*
12812* DESCRIPTION: Retrieve sensor mount angle
12813*
12814* PARAMETERS : None
12815*
12816* RETURN : sensor mount angle in uint32_t
12817*==========================================================================*/
12818uint32_t QCamera3HardwareInterface::getSensorMountAngle()
12819{
12820 return gCamCapability[mCameraId]->sensor_mount_angle;
12821}
12822
12823/*===========================================================================
12824* FUNCTION : getRelatedCalibrationData
12825*
12826* DESCRIPTION: Retrieve related system calibration data
12827*
12828* PARAMETERS : None
12829*
12830* RETURN : Pointer of related system calibration data
12831*==========================================================================*/
12832const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
12833{
12834 return (const cam_related_system_calibration_data_t *)
12835 &(gCamCapability[mCameraId]->related_cam_calibration);
12836}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012837
12838/*===========================================================================
12839 * FUNCTION : is60HzZone
12840 *
12841 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity, estimated from the local UTC offset
12842 *
12843 * PARAMETERS : None
12844 *
12845 * RETURN : True if in 60Hz zone, False otherwise
12846 *==========================================================================*/
12847bool QCamera3HardwareInterface::is60HzZone()
12848{
12849 time_t t = time(NULL);
12850 struct tm lt;
12851
12852 struct tm* r = localtime_r(&t, &lt);
12853
12854 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
12855 return true;
12856 else
12857 return false;
12858}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070012859
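/*
 * Worked examples of the UTC-offset heuristic above (derived directly from
 * the tm_gmtoff check): an offset of -5h (gmtoff = -18000) or +9h
 * (gmtoff = 32400) is treated as a 60Hz region and returns true; an offset
 * of +1h (gmtoff = 3600) returns false. If localtime_r() fails, the
 * function defaults to true.
 */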
12860/*===========================================================================
12861 * FUNCTION : adjustBlackLevelForCFA
12862 *
12863 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order
12864 *              of the sensor's Bayer CFA (Color Filter Array).
12865 *
12866 * PARAMETERS : @input: black level pattern in the order of RGGB
12867 * @output: black level pattern in the order of CFA
12868 * @color_arrangement: CFA color arrangement
12869 *
12870 * RETURN : None
12871 *==========================================================================*/
12872template<typename T>
12873void QCamera3HardwareInterface::adjustBlackLevelForCFA(
12874 T input[BLACK_LEVEL_PATTERN_CNT],
12875 T output[BLACK_LEVEL_PATTERN_CNT],
12876 cam_color_filter_arrangement_t color_arrangement)
12877{
12878 switch (color_arrangement) {
12879 case CAM_FILTER_ARRANGEMENT_GRBG:
12880 output[0] = input[1];
12881 output[1] = input[0];
12882 output[2] = input[3];
12883 output[3] = input[2];
12884 break;
12885 case CAM_FILTER_ARRANGEMENT_GBRG:
12886 output[0] = input[2];
12887 output[1] = input[3];
12888 output[2] = input[0];
12889 output[3] = input[1];
12890 break;
12891 case CAM_FILTER_ARRANGEMENT_BGGR:
12892 output[0] = input[3];
12893 output[1] = input[2];
12894 output[2] = input[1];
12895 output[3] = input[0];
12896 break;
12897 case CAM_FILTER_ARRANGEMENT_RGGB:
12898 output[0] = input[0];
12899 output[1] = input[1];
12900 output[2] = input[2];
12901 output[3] = input[3];
12902 break;
12903 default:
12904 LOGE("Invalid color arrangement to derive dynamic blacklevel");
12905 break;
12906 }
12907}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012908
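/*
 * Worked example (derived from the GRBG case above; the numeric values are
 * assumed sample black levels): with an RGGB-ordered input {R, Gr, Gb, B}
 * and a GRBG sensor, the output is reordered to {Gr, R, B, Gb}.
 *
 *   float in[BLACK_LEVEL_PATTERN_CNT]  = {64.0f, 65.0f, 66.0f, 67.0f};
 *   float out[BLACK_LEVEL_PATTERN_CNT] = {0};
 *   adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // out == {65.0f, 64.0f, 67.0f, 66.0f}
 */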
12909void QCamera3HardwareInterface::onCaptureResult(__unused pbcamera::CaptureResult *result,
12910 __unused const camera_metadata_t &resultMetadata) {
12911 // TODO: Handle HDR+ capture results.
12912}
12913
12914void QCamera3HardwareInterface::onFailedCaptureResult(
12915 __unused pbcamera::CaptureResult *failedResult) {
12916 // TODO: Handle HDR+ capture failures.
12917}
12918
Thierry Strudel3d639192016-09-09 11:52:26 -070012919}; //end namespace qcamera